diff --git a/.gitignore b/.gitignore index 8d1aad947d3..678d19148fd 100644 --- a/.gitignore +++ b/.gitignore @@ -374,7 +374,6 @@ cppcheck-cppcheck-build-dir/ *index-sort-f.html *index-sort-l.html *index.html -MachineIndependent/ godot.info amber.png emerald.png diff --git a/COPYRIGHT.txt b/COPYRIGHT.txt index 6122452b88c..31f51c2854b 100644 --- a/COPYRIGHT.txt +++ b/COPYRIGHT.txt @@ -164,7 +164,8 @@ License: FTL Files: ./thirdparty/glslang/ Comment: glslang -Copyright: 2015-2018 Google, Inc. +Copyright: 2015-2020 Google, Inc. + 2014-2020, The Khronos Group Inc 2002, NVIDIA Corporation. License: glslang @@ -350,10 +351,10 @@ License: BSD-3-clause Files: ./thirdparty/vulkan/ Comment: Vulkan Ecosystem Components (Vulkan ICD loader and headers) -Copyright: 2014-2019, The Khronos Group Inc. - 2014-2019, Valve Corporation - 2014-2019, LunarG, Inc. - 2015-2019, Google Inc. +Copyright: 2014-2020, The Khronos Group Inc. + 2014-2020, Valve Corporation + 2014-2020, LunarG, Inc. + 2015-2020, Google Inc. License: Apache-2.0 Files: ./thirdparty/vulkan/vk_mem_alloc.h @@ -872,8 +873,8 @@ License: Expat License: glslang Here, glslang proper means core GLSL parsing, HLSL parsing, and SPIR-V code - generation. Glslang proper requires use of two licenses, one that covers - non-preprocessing and an additional one that covers preprocessing. + generation. Glslang proper requires use of a number of licenses, one that covers + preprocessing and others that covers non-preprocessing. . Bison was removed long ago. You can build glslang from the source grammar, using tools of your choice, without using bison or any bison files. @@ -896,43 +897,78 @@ License: glslang . The core of glslang-proper, minus the preprocessor is licenced as follows: . + -------------------------------------------------------------------------------- + 3-Clause BSD License + -------------------------------------------------------------------------------- + . Copyright (C) 2015-2018 Google, Inc. Copyright (C) . All rights reserved. . - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - . - Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - . - Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - . - Neither the name of 3Dlabs Inc. Ltd. nor the names of its - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - . - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN - ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - POSSIBILITY OF SUCH DAMAGE. + See: . . 
-------------------------------------------------------------------------------- + 2-Clause BSD License + -------------------------------------------------------------------------------- . - The preprocessor has the core license stated above, plus an additional licence: + Copyright 2020 The Khronos Group Inc + . + See: . + . + -------------------------------------------------------------------------------- + The MIT License + -------------------------------------------------------------------------------- + . + Copyright 2020 The Khronos Group Inc + . + See: . + . + -------------------------------------------------------------------------------- + APACHE LICENSE, VERSION 2.0 + -------------------------------------------------------------------------------- + . + See: . + . + -------------------------------------------------------------------------------- + GPL 3 with special bison exception + -------------------------------------------------------------------------------- + . + Bison implementation for Yacc-like parsers in C + . + Copyright (C) 1984, 1989-1990, 2000-2015 Free Software Foundation, Inc. + . + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + . + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + . + You should have received a copy of the GNU General Public License + along with this program. If not, see . + . + As a special exception, you may create a larger work that contains + part or all of the Bison parser skeleton and distribute that work + under terms of your choice, so long as that work isn't itself a + parser generator using the skeleton or a modified version thereof + as a parser skeleton. Alternatively, if you modify or redistribute + the parser skeleton itself, you may (at your option) remove this + special exception, which will cause the skeleton and the resulting + Bison output files to be licensed under the GNU General Public + License without this special exception. + . + This special exception was added by the Free Software Foundation in + version 2.2 of Bison. + . + -------------------------------------------------------------------------------- + ================================================================================ + -------------------------------------------------------------------------------- + . + The preprocessor has the core licenses stated above, plus an additional licence: . Copyright (c) 2002, NVIDIA Corporation. . 
diff --git a/drivers/vulkan/SCsub b/drivers/vulkan/SCsub index 61d91711dab..13fcaf16d27 100644 --- a/drivers/vulkan/SCsub +++ b/drivers/vulkan/SCsub @@ -59,7 +59,6 @@ elif env["builtin_vulkan"]: if env["platform"] == "windows": loader_sources.append("dirent_on_windows.c") - loader_sources.append("dxgi_loader.c") env_thirdparty.AppendUnique( CPPDEFINES=[ "VK_USE_PLATFORM_WIN32_KHR", diff --git a/modules/glslang/SCsub b/modules/glslang/SCsub index c1d23a138b1..58c033c75dc 100644 --- a/modules/glslang/SCsub +++ b/modules/glslang/SCsub @@ -9,6 +9,7 @@ env_glslang = env_modules.Clone() if env["builtin_glslang"]: thirdparty_dir = "#thirdparty/glslang/" thirdparty_sources = [ + "glslang/CInterface/glslang_c_interface.cpp", "glslang/MachineIndependent/RemoveTree.cpp", "glslang/MachineIndependent/ParseHelper.cpp", "glslang/MachineIndependent/iomapper.cpp", @@ -30,7 +31,6 @@ if env["builtin_glslang"]: "glslang/MachineIndependent/intermOut.cpp", "glslang/MachineIndependent/SymbolTable.cpp", "glslang/MachineIndependent/glslang_tab.cpp", - "glslang/MachineIndependent/pch.cpp", "glslang/MachineIndependent/Versions.cpp", "glslang/MachineIndependent/ShaderLang.cpp", "glslang/MachineIndependent/parseConst.cpp", @@ -40,6 +40,7 @@ if env["builtin_glslang"]: "glslang/GenericCodeGen/Link.cpp", "glslang/GenericCodeGen/CodeGen.cpp", "OGLCompilersDLL/InitializeDll.cpp", + "SPIRV/CInterface/spirv_c_interface.cpp", "SPIRV/InReadableOrder.cpp", "SPIRV/GlslangToSpv.cpp", "SPIRV/SpvBuilder.cpp", @@ -49,6 +50,7 @@ if env["builtin_glslang"]: "SPIRV/SPVRemapper.cpp", "SPIRV/SpvPostProcess.cpp", "SPIRV/Logger.cpp", + "StandAlone/ResourceLimits.cpp", ] if env["platform"] == "windows": @@ -64,6 +66,8 @@ if env["builtin_glslang"]: else: env_glslang.Prepend(CPPPATH=[thirdparty_dir]) + env_glslang.Append(CPPDEFINES=["ENABLE_OPT=0"]) + env_thirdparty = env_glslang.Clone() env_thirdparty.disable_warnings() env_thirdparty.add_source_files(env.modules_sources, thirdparty_sources) diff --git a/modules/glslang/register_types.cpp b/modules/glslang/register_types.cpp index dd28c4ad4a5..3238e0108eb 100644 --- a/modules/glslang/register_types.cpp +++ b/modules/glslang/register_types.cpp @@ -33,115 +33,10 @@ #include "servers/rendering/rendering_device.h" #include +#include #include #include -static const TBuiltInResource default_builtin_resource = { - /*maxLights*/ 32, - /*maxClipPlanes*/ 6, - /*maxTextureUnits*/ 32, - /*maxTextureCoords*/ 32, - /*maxVertexAttribs*/ 64, - /*maxVertexUniformComponents*/ 4096, - /*maxVaryingFloats*/ 64, - /*maxVertexTextureImageUnits*/ 32, - /*maxCombinedTextureImageUnits*/ 80, - /*maxTextureImageUnits*/ 32, - /*maxFragmentUniformComponents*/ 4096, - /*maxDrawBuffers*/ 32, - /*maxVertexUniformVectors*/ 128, - /*maxVaryingVectors*/ 8, - /*maxFragmentUniformVectors*/ 16, - /*maxVertexOutputVectors*/ 16, - /*maxFragmentInputVectors*/ 15, - /*minProgramTexelOffset*/ -8, - /*maxProgramTexelOffset*/ 7, - /*maxClipDistances*/ 8, - /*maxComputeWorkGroupCountX*/ 65535, - /*maxComputeWorkGroupCountY*/ 65535, - /*maxComputeWorkGroupCountZ*/ 65535, - /*maxComputeWorkGroupSizeX*/ 1024, - /*maxComputeWorkGroupSizeY*/ 1024, - /*maxComputeWorkGroupSizeZ*/ 64, - /*maxComputeUniformComponents*/ 1024, - /*maxComputeTextureImageUnits*/ 16, - /*maxComputeImageUniforms*/ 8, - /*maxComputeAtomicCounters*/ 8, - /*maxComputeAtomicCounterBuffers*/ 1, - /*maxVaryingComponents*/ 60, - /*maxVertexOutputComponents*/ 64, - /*maxGeometryInputComponents*/ 64, - /*maxGeometryOutputComponents*/ 128, - /*maxFragmentInputComponents*/ 128, - 
/*maxImageUnits*/ 8, - /*maxCombinedImageUnitsAndFragmentOutputs*/ 8, - /*maxCombinedShaderOutputResources*/ 8, - /*maxImageSamples*/ 0, - /*maxVertexImageUniforms*/ 0, - /*maxTessControlImageUniforms*/ 0, - /*maxTessEvaluationImageUniforms*/ 0, - /*maxGeometryImageUniforms*/ 0, - /*maxFragmentImageUniforms*/ 8, - /*maxCombinedImageUniforms*/ 8, - /*maxGeometryTextureImageUnits*/ 16, - /*maxGeometryOutputVertices*/ 256, - /*maxGeometryTotalOutputComponents*/ 1024, - /*maxGeometryUniformComponents*/ 1024, - /*maxGeometryVaryingComponents*/ 64, - /*maxTessControlInputComponents*/ 128, - /*maxTessControlOutputComponents*/ 128, - /*maxTessControlTextureImageUnits*/ 16, - /*maxTessControlUniformComponents*/ 1024, - /*maxTessControlTotalOutputComponents*/ 4096, - /*maxTessEvaluationInputComponents*/ 128, - /*maxTessEvaluationOutputComponents*/ 128, - /*maxTessEvaluationTextureImageUnits*/ 16, - /*maxTessEvaluationUniformComponents*/ 1024, - /*maxTessPatchComponents*/ 120, - /*maxPatchVertices*/ 32, - /*maxTessGenLevel*/ 64, - /*maxViewports*/ 16, - /*maxVertexAtomicCounters*/ 0, - /*maxTessControlAtomicCounters*/ 0, - /*maxTessEvaluationAtomicCounters*/ 0, - /*maxGeometryAtomicCounters*/ 0, - /*maxFragmentAtomicCounters*/ 8, - /*maxCombinedAtomicCounters*/ 8, - /*maxAtomicCounterBindings*/ 1, - /*maxVertexAtomicCounterBuffers*/ 0, - /*maxTessControlAtomicCounterBuffers*/ 0, - /*maxTessEvaluationAtomicCounterBuffers*/ 0, - /*maxGeometryAtomicCounterBuffers*/ 0, - /*maxFragmentAtomicCounterBuffers*/ 1, - /*maxCombinedAtomicCounterBuffers*/ 1, - /*maxAtomicCounterBufferSize*/ 16384, - /*maxTransformFeedbackBuffers*/ 4, - /*maxTransformFeedbackInterleavedComponents*/ 64, - /*maxCullDistances*/ 8, - /*maxCombinedClipAndCullDistances*/ 8, - /*maxSamples*/ 4, - /*maxMeshOutputVerticesNV*/ 0, - /*maxMeshOutputPrimitivesNV*/ 0, - /*maxMeshWorkGroupSizeX_NV*/ 0, - /*maxMeshWorkGroupSizeY_NV*/ 0, - /*maxMeshWorkGroupSizeZ_NV*/ 0, - /*maxTaskWorkGroupSizeX_NV*/ 0, - /*maxTaskWorkGroupSizeY_NV*/ 0, - /*maxTaskWorkGroupSizeZ_NV*/ 0, - /*maxMeshViewCountNV*/ 0, - /*limits*/ { - /*nonInductiveForLoops*/ true, - /*whileLoops*/ true, - /*doWhileLoops*/ true, - /*generalUniformIndexing*/ true, - /*generalAttributeMatrixVectorIndexing*/ true, - /*generalVaryingIndexing*/ true, - /*generalSamplerIndexing*/ true, - /*generalVariableIndexing*/ true, - /*generalConstantMatrixVectorIndexing*/ true, - } -}; - static Vector _compile_shader_glsl(RenderingDevice::ShaderStage p_stage, const String &p_source_code, RenderingDevice::ShaderLanguage p_language, String *r_error) { Vector ret; @@ -175,7 +70,7 @@ static Vector _compile_shader_glsl(RenderingDevice::ShaderStage p_stage std::string pre_processed_code; //preprocess - if (!shader.preprocess(&default_builtin_resource, DefaultVersion, ENoProfile, false, false, messages, &pre_processed_code, includer)) { + if (!shader.preprocess(&glslang::DefaultTBuiltInResource, DefaultVersion, ENoProfile, false, false, messages, &pre_processed_code, includer)) { if (r_error) { (*r_error) = "Failed pre-process:\n"; (*r_error) += shader.getInfoLog(); @@ -190,7 +85,7 @@ static Vector _compile_shader_glsl(RenderingDevice::ShaderStage p_stage shader.setStrings(&cs_strings, 1); //parse - if (!shader.parse(&default_builtin_resource, DefaultVersion, false, messages)) { + if (!shader.parse(&glslang::DefaultTBuiltInResource, DefaultVersion, false, messages)) { if (r_error) { (*r_error) = "Failed parse:\n"; (*r_error) += shader.getInfoLog(); diff --git a/thirdparty/README.md b/thirdparty/README.md 
index 46a75d94b0c..2a956ed0b73 100644 --- a/thirdparty/README.md +++ b/thirdparty/README.md @@ -164,7 +164,7 @@ Files extracted from upstream source: ## glslang - Upstream: https://github.com/KhronosGroup/glslang -- Version: git (4fc7a33910fb8e40b970d160e1b38ab3f67fe0f3, 2020) +- Version: git (bacaef3237c515e40d1a24722be48c0a0b30f75f, 2020) - License: glslang Version should be kept in sync with the one of the used Vulkan SDK (see `vulkan` @@ -173,10 +173,12 @@ good glslang commit: https://github.com/KhronosGroup/Vulkan-ValidationLayers/blo Files extracted from upstream source: -- `glslang`, `OGLCompilersDLL`, `SPIRV` +- `glslang` (except `glslang/HLSL`), `OGLCompilersDLL`, `SPIRV` +- `StandAlone/{DirStackFileIncluder.h,ResourceLimits.{cpp,h}}` +- Run `cmake . && make` and copy generated `include/glslang/build_info.h` + to `glslang/build_info.h` - `LICENSE.txt` -- Unnecessary files like `CMakeLists.txt`, `revision.template` and - `updateGrammar` removed. +- Unnecessary files like `CMakeLists.txt` and `updateGrammar` removed. Patches in the `patches` directory should be re-applied after updates. @@ -592,7 +594,7 @@ folder. ## vulkan - Upstream: https://github.com/KhronosGroup/Vulkan-Loader -- Version: sdk-1.2.131.2 (2020) +- Version: sdk-1.2.154.0 (2020) - License: Apache 2.0 Unless there is a specific reason to package a more recent version, please stick @@ -610,11 +612,10 @@ Files extracted from upstream source: `vk_enum_string_helper.h` is taken from the matching `Vulkan-ValidationLayers` SDK release: https://github.com/KhronosGroup/Vulkan-ValidationLayers/blob/master/layers/generated/vk_enum_string_helper.h -Includes custom change to disable MSVC pragma, might be upstreamed via: -https://github.com/KhronosGroup/Vulkan-ValidationLayers/pull/1666 `vk_mem_alloc.h` is taken from https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator Version: 2.3.0 (2019) +`vk_mem_alloc.cpp` and `android/vk_mem_alloc.cpp` are Godot files and should be preserved on updates. Patches in the `patches` directory should be re-applied after updates. diff --git a/thirdparty/glslang/LICENSE.txt b/thirdparty/glslang/LICENSE.txt index a10c0944f88..5f58565dc45 100644 --- a/thirdparty/glslang/LICENSE.txt +++ b/thirdparty/glslang/LICENSE.txt @@ -1,6 +1,6 @@ Here, glslang proper means core GLSL parsing, HLSL parsing, and SPIR-V code -generation. Glslang proper requires use of two licenses, one that covers -non-preprocessing and an additional one that covers preprocessing. +generation. Glslang proper requires use of a number of licenses, one that covers +preprocessing and others that covers non-preprocessing. Bison was removed long ago. You can build glslang from the source grammar, using tools of your choice, without using bison or any bison files. @@ -23,6 +23,10 @@ Other parts, outside of glslang proper, include: The core of glslang-proper, minus the preprocessor is licenced as follows: +-------------------------------------------------------------------------------- +3-Clause BSD License +-------------------------------------------------------------------------------- + // // Copyright (C) 2015-2018 Google, Inc. // Copyright (C) @@ -59,9 +63,281 @@ The core of glslang-proper, minus the preprocessor is licenced as follows: // POSSIBILITY OF SUCH DAMAGE. 
// + +-------------------------------------------------------------------------------- +2-Clause BSD License -------------------------------------------------------------------------------- -The preprocessor has the core license stated above, plus an additional licence: +Copyright 2020 The Khronos Group Inc + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +-------------------------------------------------------------------------------- +The MIT License +-------------------------------------------------------------------------------- + +Copyright 2020 The Khronos Group Inc + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +-------------------------------------------------------------------------------- +APACHE LICENSE, VERSION 2.0 +-------------------------------------------------------------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + +-------------------------------------------------------------------------------- +GPL 3 with special bison exception +-------------------------------------------------------------------------------- + + Bison implementation for Yacc-like parsers in C + + Copyright (C) 1984, 1989-1990, 2000-2015 Free Software Foundation, Inc. + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + + As a special exception, you may create a larger work that contains + part or all of the Bison parser skeleton and distribute that work + under terms of your choice, so long as that work isn't itself a + parser generator using the skeleton or a modified version thereof + as a parser skeleton. Alternatively, if you modify or redistribute + the parser skeleton itself, you may (at your option) remove this + special exception, which will cause the skeleton and the resulting + Bison output files to be licensed under the GNU General Public + License without this special exception. + + This special exception was added by the Free Software Foundation in + version 2.2 of Bison. + +-------------------------------------------------------------------------------- +================================================================================ +-------------------------------------------------------------------------------- + +The preprocessor has the core licenses stated above, plus an additional licence: /****************************************************************************\ Copyright (c) 2002, NVIDIA Corporation. diff --git a/thirdparty/glslang/SPIRV/CInterface/spirv_c_interface.cpp b/thirdparty/glslang/SPIRV/CInterface/spirv_c_interface.cpp new file mode 100644 index 00000000000..a0790f48f14 --- /dev/null +++ b/thirdparty/glslang/SPIRV/CInterface/spirv_c_interface.cpp @@ -0,0 +1,110 @@ +/** + This code is based on the glslang_c_interface implementation by Viktor Latypov +**/ + +/** +BSD 2-Clause License + +Copyright (c) 2019, Viktor Latypov +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +**/ + +#include "glslang/Include/glslang_c_interface.h" + +#include "SPIRV/GlslangToSpv.h" +#include "SPIRV/Logger.h" +#include "SPIRV/SpvTools.h" + +typedef struct glslang_program_s { + glslang::TProgram* program; + std::vector spirv; + std::string loggerMessages; +} glslang_program_t; + +static EShLanguage c_shader_stage(glslang_stage_t stage) +{ + switch (stage) { + case GLSLANG_STAGE_VERTEX: + return EShLangVertex; + case GLSLANG_STAGE_TESSCONTROL: + return EShLangTessControl; + case GLSLANG_STAGE_TESSEVALUATION: + return EShLangTessEvaluation; + case GLSLANG_STAGE_GEOMETRY: + return EShLangGeometry; + case GLSLANG_STAGE_FRAGMENT: + return EShLangFragment; + case GLSLANG_STAGE_COMPUTE: + return EShLangCompute; + case GLSLANG_STAGE_RAYGEN_NV: + return EShLangRayGen; + case GLSLANG_STAGE_INTERSECT_NV: + return EShLangIntersect; + case GLSLANG_STAGE_ANYHIT_NV: + return EShLangAnyHit; + case GLSLANG_STAGE_CLOSESTHIT_NV: + return EShLangClosestHit; + case GLSLANG_STAGE_MISS_NV: + return EShLangMiss; + case GLSLANG_STAGE_CALLABLE_NV: + return EShLangCallable; + case GLSLANG_STAGE_TASK_NV: + return EShLangTaskNV; + case GLSLANG_STAGE_MESH_NV: + return EShLangMeshNV; + default: + break; + } + return EShLangCount; +} + +GLSLANG_EXPORT void glslang_program_SPIRV_generate(glslang_program_t* program, glslang_stage_t stage) +{ + spv::SpvBuildLogger logger; + glslang::SpvOptions spvOptions; + spvOptions.validate = true; + + const glslang::TIntermediate* intermediate = program->program->getIntermediate(c_shader_stage(stage)); + + glslang::GlslangToSpv(*intermediate, program->spirv, &logger, &spvOptions); + + program->loggerMessages = logger.getAllMessages(); +} + +GLSLANG_EXPORT size_t glslang_program_SPIRV_get_size(glslang_program_t* program) { return program->spirv.size(); } + +GLSLANG_EXPORT void glslang_program_SPIRV_get(glslang_program_t* program, unsigned int* out) +{ + memcpy(out, program->spirv.data(), program->spirv.size() * sizeof(unsigned int)); +} + +GLSLANG_EXPORT unsigned int* glslang_program_SPIRV_get_ptr(glslang_program_t* program) +{ + return program->spirv.data(); +} + +GLSLANG_EXPORT const char* glslang_program_SPIRV_get_messages(glslang_program_t* program) +{ + return program->loggerMessages.empty() ? 
nullptr : program->loggerMessages.c_str(); +} diff --git a/thirdparty/glslang/SPIRV/GLSL.ext.EXT.h b/thirdparty/glslang/SPIRV/GLSL.ext.EXT.h index 40164b6187f..6eb0eeeada4 100644 --- a/thirdparty/glslang/SPIRV/GLSL.ext.EXT.h +++ b/thirdparty/glslang/SPIRV/GLSL.ext.EXT.h @@ -35,5 +35,6 @@ static const char* const E_SPV_EXT_shader_viewport_index_layer = "SPV_EXT_shade static const char* const E_SPV_EXT_fragment_fully_covered = "SPV_EXT_fragment_fully_covered"; static const char* const E_SPV_EXT_fragment_invocation_density = "SPV_EXT_fragment_invocation_density"; static const char* const E_SPV_EXT_demote_to_helper_invocation = "SPV_EXT_demote_to_helper_invocation"; +static const char* const E_SPV_EXT_shader_atomic_float_add = "SPV_EXT_shader_atomic_float_add"; #endif // #ifndef GLSLextEXT_H diff --git a/thirdparty/glslang/SPIRV/GLSL.ext.KHR.h b/thirdparty/glslang/SPIRV/GLSL.ext.KHR.h index e58e836a8d8..d783a8f299e 100644 --- a/thirdparty/glslang/SPIRV/GLSL.ext.KHR.h +++ b/thirdparty/glslang/SPIRV/GLSL.ext.KHR.h @@ -1,5 +1,6 @@ /* -** Copyright (c) 2014-2016 The Khronos Group Inc. +** Copyright (c) 2014-2020 The Khronos Group Inc. +** Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. ** ** Permission is hereby granted, free of charge, to any person obtaining a copy ** of this software and/or associated documentation files (the "Materials"), @@ -44,5 +45,7 @@ static const char* const E_SPV_EXT_physical_storage_buffer = "SPV_EXT_physi static const char* const E_SPV_KHR_physical_storage_buffer = "SPV_KHR_physical_storage_buffer"; static const char* const E_SPV_EXT_fragment_shader_interlock = "SPV_EXT_fragment_shader_interlock"; static const char* const E_SPV_KHR_shader_clock = "SPV_KHR_shader_clock"; - +static const char* const E_SPV_KHR_non_semantic_info = "SPV_KHR_non_semantic_info"; +static const char* const E_SPV_KHR_ray_tracing = "SPV_KHR_ray_tracing"; +static const char* const E_SPV_KHR_ray_query = "SPV_KHR_ray_query"; #endif // #ifndef GLSLextKHR_H diff --git a/thirdparty/glslang/SPIRV/GlslangToSpv.cpp b/thirdparty/glslang/SPIRV/GlslangToSpv.cpp old mode 100755 new mode 100644 index 0c8a87e3ce2..c85541603f7 --- a/thirdparty/glslang/SPIRV/GlslangToSpv.cpp +++ b/thirdparty/glslang/SPIRV/GlslangToSpv.cpp @@ -1,7 +1,8 @@ // // Copyright (C) 2014-2016 LunarG, Inc. -// Copyright (C) 2015-2018 Google, Inc. +// Copyright (C) 2015-2020 Google, Inc. // Copyright (C) 2017 ARM Limited. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. 
// @@ -48,13 +49,16 @@ namespace spv { #include "GLSL.ext.EXT.h" #include "GLSL.ext.AMD.h" #include "GLSL.ext.NV.h" + #include "NonSemanticDebugPrintf.h" } // Glslang includes #include "../glslang/MachineIndependent/localintermediate.h" #include "../glslang/MachineIndependent/SymbolTable.h" #include "../glslang/Include/Common.h" -#include "../glslang/Include/revision.h" + +// Build-time generated includes +#include "glslang/build_info.h" #include #include @@ -187,7 +191,8 @@ protected: void makeGlobalInitializers(const glslang::TIntermSequence&); void visitFunctions(const glslang::TIntermSequence&); void handleFunctionEntry(const glslang::TIntermAggregate* node); - void translateArguments(const glslang::TIntermAggregate& node, std::vector& arguments, spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags); + void translateArguments(const glslang::TIntermAggregate& node, std::vector& arguments, + spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags); void translateArguments(glslang::TIntermUnary& node, std::vector& arguments); spv::Id createImageTextureFunctionCall(glslang::TIntermOperator* node); spv::Id handleUserFunctionCall(const glslang::TIntermAggregate*); @@ -196,28 +201,36 @@ protected: glslang::TBasicType typeProxy, bool reduceComparison = true); spv::Id createBinaryMatrixOperation(spv::Op, OpDecorations&, spv::Id typeId, spv::Id left, spv::Id right); spv::Id createUnaryOperation(glslang::TOperator op, OpDecorations&, spv::Id typeId, spv::Id operand, - glslang::TBasicType typeProxy, const spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags); + glslang::TBasicType typeProxy, + const spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags); spv::Id createUnaryMatrixOperation(spv::Op op, OpDecorations&, spv::Id typeId, spv::Id operand, glslang::TBasicType typeProxy); spv::Id createConversion(glslang::TOperator op, OpDecorations&, spv::Id destTypeId, spv::Id operand, glslang::TBasicType typeProxy); spv::Id createIntWidthConversion(glslang::TOperator op, spv::Id operand, int vectorSize); spv::Id makeSmearedConstant(spv::Id constant, int vectorSize); - spv::Id createAtomicOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy, const spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags); - spv::Id createInvocationsOperation(glslang::TOperator op, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy); - spv::Id CreateInvocationsVectorOperation(spv::Op op, spv::GroupOperation groupOperation, spv::Id typeId, std::vector& operands); - spv::Id createSubgroupOperation(glslang::TOperator op, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy); - spv::Id createMiscOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy); + spv::Id createAtomicOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId, + std::vector& operands, glslang::TBasicType typeProxy, + const spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags); + spv::Id createInvocationsOperation(glslang::TOperator op, spv::Id typeId, std::vector& operands, + glslang::TBasicType typeProxy); + spv::Id CreateInvocationsVectorOperation(spv::Op op, spv::GroupOperation groupOperation, + spv::Id typeId, std::vector& operands); + spv::Id createSubgroupOperation(glslang::TOperator op, spv::Id typeId, std::vector& operands, + glslang::TBasicType typeProxy); + spv::Id createMiscOperation(glslang::TOperator 
op, spv::Decoration precision, spv::Id typeId, + std::vector& operands, glslang::TBasicType typeProxy); spv::Id createNoArgOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId); spv::Id getSymbolId(const glslang::TIntermSymbol* node); void addMeshNVDecoration(spv::Id id, int member, const glslang::TQualifier & qualifier); spv::Id createSpvConstant(const glslang::TIntermTyped&); - spv::Id createSpvConstantFromConstUnionArray(const glslang::TType& type, const glslang::TConstUnionArray&, int& nextConst, bool specConstant); + spv::Id createSpvConstantFromConstUnionArray(const glslang::TType& type, const glslang::TConstUnionArray&, + int& nextConst, bool specConstant); bool isTrivialLeaf(const glslang::TIntermTyped* node); bool isTrivial(const glslang::TIntermTyped* node); spv::Id createShortCircuit(glslang::TOperator, glslang::TIntermTyped& left, glslang::TIntermTyped& right); spv::Id getExtBuiltins(const char* name); - std::pair getForcedType(spv::BuiltIn, const glslang::TType&); + std::pair getForcedType(glslang::TBuiltInVariable builtIn, const glslang::TType&); spv::Id translateForcedType(spv::Id object); spv::Id createCompositeConstruct(spv::Id typeId, std::vector constituents); @@ -233,15 +246,18 @@ protected: spv::Builder builder; bool inEntryPoint; bool entryPointTerminated; - bool linkageOnly; // true when visiting the set of objects in the AST present only for establishing interface, whether or not they were statically used + bool linkageOnly; // true when visiting the set of objects in the AST present only for + // establishing interface, whether or not they were statically used std::set iOSet; // all input/output variables from either static use or declaration of interface const glslang::TIntermediate* glslangIntermediate; bool nanMinMaxClamp; // true if use NMin/NMax/NClamp instead of FMin/FMax/FClamp spv::Id stdBuiltins; + spv::Id nonSemanticDebugPrintf; std::unordered_map extBuiltinMap; std::unordered_map symbolValues; - std::unordered_set rValueParameters; // set of formal function parameters passed as rValues, rather than a pointer + std::unordered_set rValueParameters; // set of formal function parameters passed as rValues, + // rather than a pointer std::unordered_map functionMap; std::unordered_map structMap[glslang::ElpCount][glslang::ElmCount]; // for mapping glslang block indices to spv indices (e.g., due to hidden members): @@ -267,6 +283,8 @@ spv::SourceLanguage TranslateSourceLanguage(glslang::EShSource source, EProfile { #ifdef GLSLANG_WEB return spv::SourceLanguageESSL; +#elif defined(GLSLANG_ANGLE) + return spv::SourceLanguageGLSL; #endif switch (source) { @@ -299,12 +317,12 @@ spv::ExecutionModel TranslateExecutionModel(EShLanguage stage) case EShLangTessControl: return spv::ExecutionModelTessellationControl; case EShLangTessEvaluation: return spv::ExecutionModelTessellationEvaluation; case EShLangGeometry: return spv::ExecutionModelGeometry; - case EShLangRayGenNV: return spv::ExecutionModelRayGenerationNV; - case EShLangIntersectNV: return spv::ExecutionModelIntersectionNV; - case EShLangAnyHitNV: return spv::ExecutionModelAnyHitNV; - case EShLangClosestHitNV: return spv::ExecutionModelClosestHitNV; - case EShLangMissNV: return spv::ExecutionModelMissNV; - case EShLangCallableNV: return spv::ExecutionModelCallableNV; + case EShLangRayGen: return spv::ExecutionModelRayGenerationKHR; + case EShLangIntersect: return spv::ExecutionModelIntersectionKHR; + case EShLangAnyHit: return spv::ExecutionModelAnyHitKHR; + case EShLangClosestHit: return 
spv::ExecutionModelClosestHitKHR; + case EShLangMiss: return spv::ExecutionModelMissKHR; + case EShLangCallable: return spv::ExecutionModelCallableKHR; case EShLangTaskNV: return spv::ExecutionModelTaskNV; case EShLangMeshNV: return spv::ExecutionModelMeshNV; #endif @@ -358,11 +376,11 @@ spv::Decoration TranslateBlockDecoration(const glslang::TType& type, bool useSto case glslang::EvqVaryingIn: return spv::DecorationBlock; case glslang::EvqVaryingOut: return spv::DecorationBlock; #ifndef GLSLANG_WEB - case glslang::EvqPayloadNV: return spv::DecorationBlock; - case glslang::EvqPayloadInNV: return spv::DecorationBlock; - case glslang::EvqHitAttrNV: return spv::DecorationBlock; - case glslang::EvqCallableDataNV: return spv::DecorationBlock; - case glslang::EvqCallableDataInNV: return spv::DecorationBlock; + case glslang::EvqPayload: return spv::DecorationBlock; + case glslang::EvqPayloadIn: return spv::DecorationBlock; + case glslang::EvqHitAttr: return spv::DecorationBlock; + case glslang::EvqCallableData: return spv::DecorationBlock; + case glslang::EvqCallableDataIn: return spv::DecorationBlock; #endif default: assert(0); @@ -374,7 +392,8 @@ spv::Decoration TranslateBlockDecoration(const glslang::TType& type, bool useSto } // Translate glslang type to SPIR-V memory decorations. -void TranslateMemoryDecoration(const glslang::TQualifier& qualifier, std::vector& memory, bool useVulkanMemoryModel) +void TranslateMemoryDecoration(const glslang::TQualifier& qualifier, std::vector& memory, + bool useVulkanMemoryModel) { if (!useVulkanMemoryModel) { if (qualifier.isCoherent()) @@ -433,11 +452,11 @@ spv::Decoration TranslateLayoutDecoration(const glslang::TType& type, glslang::T } return spv::DecorationMax; #ifndef GLSLANG_WEB - case glslang::EvqPayloadNV: - case glslang::EvqPayloadInNV: - case glslang::EvqHitAttrNV: - case glslang::EvqCallableDataNV: - case glslang::EvqCallableDataInNV: + case glslang::EvqPayload: + case glslang::EvqPayloadIn: + case glslang::EvqHitAttr: + case glslang::EvqCallableData: + case glslang::EvqCallableDataIn: return spv::DecorationMax; #endif default: @@ -529,15 +548,11 @@ spv::MemoryAccessMask TGlslangToSpvTraverser::TranslateMemoryAccess( if (!glslangIntermediate->usingVulkanMemoryModel() || coherentFlags.isImage) return mask; - if (coherentFlags.volatil || - coherentFlags.coherent || - coherentFlags.devicecoherent || - coherentFlags.queuefamilycoherent || - coherentFlags.workgroupcoherent || - coherentFlags.subgroupcoherent) { + if (coherentFlags.isVolatile() || coherentFlags.anyCoherent()) { mask = mask | spv::MemoryAccessMakePointerAvailableKHRMask | spv::MemoryAccessMakePointerVisibleKHRMask; } + if (coherentFlags.nonprivate) { mask = mask | spv::MemoryAccessNonPrivatePointerKHRMask; } @@ -562,11 +577,7 @@ spv::ImageOperandsMask TGlslangToSpvTraverser::TranslateImageOperands( return mask; if (coherentFlags.volatil || - coherentFlags.coherent || - coherentFlags.devicecoherent || - coherentFlags.queuefamilycoherent || - coherentFlags.workgroupcoherent || - coherentFlags.subgroupcoherent) { + coherentFlags.anyCoherent()) { mask = mask | spv::ImageOperandsMakeTexelAvailableKHRMask | spv::ImageOperandsMakeTexelVisibleKHRMask; } @@ -595,14 +606,11 @@ spv::Builder::AccessChain::CoherentFlags TGlslangToSpvTraverser::TranslateCohere flags.workgroupcoherent = type.getQualifier().workgroupcoherent || type.getQualifier().storage == glslang::EvqShared; flags.subgroupcoherent = type.getQualifier().subgroupcoherent; + flags.shadercallcoherent = 
type.getQualifier().shadercallcoherent; flags.volatil = type.getQualifier().volatil; // *coherent variables are implicitly nonprivate in GLSL flags.nonprivate = type.getQualifier().nonprivate || - flags.subgroupcoherent || - flags.workgroupcoherent || - flags.queuefamilycoherent || - flags.devicecoherent || - flags.coherent || + flags.anyCoherent() || flags.volatil; flags.isImage = type.getBasicType() == glslang::EbtSampler; #endif @@ -626,6 +634,8 @@ spv::Scope TGlslangToSpvTraverser::TranslateMemoryScope( scope = spv::ScopeWorkgroup; } else if (coherentFlags.subgroupcoherent) { scope = spv::ScopeSubgroup; + } else if (coherentFlags.shadercallcoherent) { + scope = spv::ScopeShaderCallKHR; } if (glslangIntermediate->usingVulkanMemoryModel() && scope == spv::ScopeDevice) { builder.addCapability(spv::CapabilityVulkanMemoryModelDeviceScopeKHR); @@ -640,7 +650,8 @@ spv::Scope TGlslangToSpvTraverser::TranslateMemoryScope( // is generated only when using the variable in an executable instruction, but not when // just declaring a struct member variable with it. This is true for PointSize, // ClipDistance, and CullDistance. -spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltInVariable builtIn, bool memberDeclaration) +spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltInVariable builtIn, + bool memberDeclaration) { switch (builtIn) { case glslang::EbvPointSize: @@ -933,34 +944,38 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI return spv::BuiltInInvocationsPerPixelNV; // ray tracing - case glslang::EbvLaunchIdNV: - return spv::BuiltInLaunchIdNV; - case glslang::EbvLaunchSizeNV: - return spv::BuiltInLaunchSizeNV; - case glslang::EbvWorldRayOriginNV: - return spv::BuiltInWorldRayOriginNV; - case glslang::EbvWorldRayDirectionNV: - return spv::BuiltInWorldRayDirectionNV; - case glslang::EbvObjectRayOriginNV: - return spv::BuiltInObjectRayOriginNV; - case glslang::EbvObjectRayDirectionNV: - return spv::BuiltInObjectRayDirectionNV; - case glslang::EbvRayTminNV: - return spv::BuiltInRayTminNV; - case glslang::EbvRayTmaxNV: - return spv::BuiltInRayTmaxNV; - case glslang::EbvInstanceCustomIndexNV: - return spv::BuiltInInstanceCustomIndexNV; - case glslang::EbvHitTNV: - return spv::BuiltInHitTNV; - case glslang::EbvHitKindNV: - return spv::BuiltInHitKindNV; - case glslang::EbvObjectToWorldNV: - return spv::BuiltInObjectToWorldNV; - case glslang::EbvWorldToObjectNV: - return spv::BuiltInWorldToObjectNV; - case glslang::EbvIncomingRayFlagsNV: - return spv::BuiltInIncomingRayFlagsNV; + case glslang::EbvLaunchId: + return spv::BuiltInLaunchIdKHR; + case glslang::EbvLaunchSize: + return spv::BuiltInLaunchSizeKHR; + case glslang::EbvWorldRayOrigin: + return spv::BuiltInWorldRayOriginKHR; + case glslang::EbvWorldRayDirection: + return spv::BuiltInWorldRayDirectionKHR; + case glslang::EbvObjectRayOrigin: + return spv::BuiltInObjectRayOriginKHR; + case glslang::EbvObjectRayDirection: + return spv::BuiltInObjectRayDirectionKHR; + case glslang::EbvRayTmin: + return spv::BuiltInRayTminKHR; + case glslang::EbvRayTmax: + return spv::BuiltInRayTmaxKHR; + case glslang::EbvInstanceCustomIndex: + return spv::BuiltInInstanceCustomIndexKHR; + case glslang::EbvHitT: + return spv::BuiltInHitTKHR; + case glslang::EbvHitKind: + return spv::BuiltInHitKindKHR; + case glslang::EbvObjectToWorld: + case glslang::EbvObjectToWorld3x4: + return spv::BuiltInObjectToWorldKHR; + case glslang::EbvWorldToObject: + case glslang::EbvWorldToObject3x4: + 
return spv::BuiltInWorldToObjectKHR; + case glslang::EbvIncomingRayFlags: + return spv::BuiltInIncomingRayFlagsKHR; + case glslang::EbvGeometryIndex: + return spv::BuiltInRayGeometryIndexKHR; // barycentrics case glslang::EbvBaryCoordNV: @@ -1106,7 +1121,8 @@ spv::ImageFormat TGlslangToSpvTraverser::TranslateImageFormat(const glslang::TTy } } -spv::SelectionControlMask TGlslangToSpvTraverser::TranslateSelectionControl(const glslang::TIntermSelection& selectionNode) const +spv::SelectionControlMask TGlslangToSpvTraverser::TranslateSelectionControl( + const glslang::TIntermSelection& selectionNode) const { if (selectionNode.getFlatten()) return spv::SelectionControlFlattenMask; @@ -1115,7 +1131,8 @@ spv::SelectionControlMask TGlslangToSpvTraverser::TranslateSelectionControl(cons return spv::SelectionControlMaskNone; } -spv::SelectionControlMask TGlslangToSpvTraverser::TranslateSwitchControl(const glslang::TIntermSwitch& switchNode) const +spv::SelectionControlMask TGlslangToSpvTraverser::TranslateSwitchControl(const glslang::TIntermSwitch& switchNode) + const { if (switchNode.getFlatten()) return spv::SelectionControlFlattenMask; @@ -1169,6 +1186,8 @@ spv::LoopControlMask TGlslangToSpvTraverser::TranslateLoopControl(const glslang: // Translate glslang type to SPIR-V storage class. spv::StorageClass TGlslangToSpvTraverser::TranslateStorageClass(const glslang::TType& type) { + if (type.getBasicType() == glslang::EbtRayQuery) + return spv::StorageClassFunction; if (type.getQualifier().isPipeInput()) return spv::StorageClassInput; if (type.getQualifier().isPipeOutput()) @@ -1183,8 +1202,8 @@ spv::StorageClass TGlslangToSpvTraverser::TranslateStorageClass(const glslang::T } if (type.getQualifier().isUniformOrBuffer() && - type.getQualifier().isShaderRecordNV()) { - return spv::StorageClassShaderRecordBufferNV; + type.getQualifier().isShaderRecord()) { + return spv::StorageClassShaderRecordBufferKHR; } if (glslangIntermediate->usingStorageBuffer() && type.getQualifier().storage == glslang::EvqBuffer) { @@ -1206,11 +1225,11 @@ spv::StorageClass TGlslangToSpvTraverser::TranslateStorageClass(const glslang::T case glslang::EvqTemporary: return spv::StorageClassFunction; case glslang::EvqShared: return spv::StorageClassWorkgroup; #ifndef GLSLANG_WEB - case glslang::EvqPayloadNV: return spv::StorageClassRayPayloadNV; - case glslang::EvqPayloadInNV: return spv::StorageClassIncomingRayPayloadNV; - case glslang::EvqHitAttrNV: return spv::StorageClassHitAttributeNV; - case glslang::EvqCallableDataNV: return spv::StorageClassCallableDataNV; - case glslang::EvqCallableDataInNV: return spv::StorageClassIncomingCallableDataNV; + case glslang::EvqPayload: return spv::StorageClassRayPayloadKHR; + case glslang::EvqPayloadIn: return spv::StorageClassIncomingRayPayloadKHR; + case glslang::EvqHitAttr: return spv::StorageClassHitAttributeKHR; + case glslang::EvqCallableData: return spv::StorageClassCallableDataKHR; + case glslang::EvqCallableDataIn: return spv::StorageClassIncomingCallableDataKHR; #endif default: assert(0); @@ -1270,14 +1289,15 @@ bool IsDescriptorResource(const glslang::TType& type) // uniform and buffer blocks are included, unless it is a push_constant if (type.getBasicType() == glslang::EbtBlock) return type.getQualifier().isUniformOrBuffer() && - ! type.getQualifier().isShaderRecordNV() && + ! type.getQualifier().isShaderRecord() && ! type.getQualifier().isPushConstant(); // non block... 
// basically samplerXXX/subpass/sampler/texture are all included // if they are the global-scope-class, not the function parameter // (or local, if they ever exist) class. - if (type.getBasicType() == glslang::EbtSampler) + if (type.getBasicType() == glslang::EbtSampler || + type.getBasicType() == glslang::EbtAccStruct) return type.getQualifier().isUniformOrBuffer(); // None of the above. @@ -1320,6 +1340,8 @@ void InheritQualifiers(glslang::TQualifier& child, const glslang::TQualifier& pa child.workgroupcoherent = true; if (parent.subgroupcoherent) child.subgroupcoherent = true; + if (parent.shadercallcoherent) + child.shadercallcoherent = true; if (parent.nonprivate) child.nonprivate = true; if (parent.volatil) @@ -1350,16 +1372,18 @@ bool HasNonLayoutQualifiers(const glslang::TType& type, const glslang::TQualifie // Implement the TGlslangToSpvTraverser class. // -TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, const glslang::TIntermediate* glslangIntermediate, - spv::SpvBuildLogger* buildLogger, glslang::SpvOptions& options) - : TIntermTraverser(true, false, true), - options(options), - shaderEntry(nullptr), currentFunction(nullptr), - sequenceDepth(0), logger(buildLogger), - builder(spvVersion, (glslang::GetKhronosToolId() << 16) | glslang::GetSpirvGeneratorVersion(), logger), - inEntryPoint(false), entryPointTerminated(false), linkageOnly(false), - glslangIntermediate(glslangIntermediate), - nanMinMaxClamp(glslangIntermediate->getNanMinMaxClamp()) +TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, + const glslang::TIntermediate* glslangIntermediate, + spv::SpvBuildLogger* buildLogger, glslang::SpvOptions& options) : + TIntermTraverser(true, false, true), + options(options), + shaderEntry(nullptr), currentFunction(nullptr), + sequenceDepth(0), logger(buildLogger), + builder(spvVersion, (glslang::GetKhronosToolId() << 16) | glslang::GetSpirvGeneratorVersion(), logger), + inEntryPoint(false), entryPointTerminated(false), linkageOnly(false), + glslangIntermediate(glslangIntermediate), + nanMinMaxClamp(glslangIntermediate->getNanMinMaxClamp()), + nonSemanticDebugPrintf(0) { spv::ExecutionModel executionModel = TranslateExecutionModel(glslangIntermediate->getStage()); @@ -1402,7 +1426,7 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, const gl addressingModel = spv::AddressingModelPhysicalStorageBuffer64EXT; builder.addIncorporatedExtension(spv::E_SPV_EXT_physical_storage_buffer, spv::Spv_1_5); builder.addCapability(spv::CapabilityPhysicalStorageBufferAddressesEXT); - }; + } if (glslangIntermediate->usingVulkanMemoryModel()) { memoryModel = spv::MemoryModelVulkanKHR; builder.addCapability(spv::CapabilityVulkanMemoryModelKHR); @@ -1429,6 +1453,10 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, const gl builder.addExecutionMode(shaderEntry, spv::ExecutionModeXfb); } + if (glslangIntermediate->getLayoutPrimitiveCulling()) { + builder.addCapability(spv::CapabilityRayTraversalPrimitiveCullingProvisionalKHR); + } + unsigned int mode; switch (glslangIntermediate->getStage()) { case EShLangVertex: @@ -1458,6 +1486,7 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, const gl builder.addExecutionMode(shaderEntry, spv::ExecutionModeDepthReplacing); #ifndef GLSLANG_WEB + switch(glslangIntermediate->getDepth()) { case glslang::EldGreater: mode = spv::ExecutionModeDepthGreater; break; case glslang::EldLess: mode = spv::ExecutionModeDepthLess; break; @@ -1495,7 +1524,7 @@ 
TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, const gl builder.addExtension(spv::E_SPV_EXT_fragment_shader_interlock); } #endif - break; + break; case EShLangCompute: builder.addCapability(spv::CapabilityShader); @@ -1520,7 +1549,8 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, const gl glslang::TLayoutGeometry primitive; if (glslangIntermediate->getStage() == EShLangTessControl) { - builder.addExecutionMode(shaderEntry, spv::ExecutionModeOutputVertices, glslangIntermediate->getVertices()); + builder.addExecutionMode(shaderEntry, spv::ExecutionModeOutputVertices, + glslangIntermediate->getVertices()); primitive = glslangIntermediate->getOutputPrimitive(); } else { primitive = glslangIntermediate->getInputPrimitive(); @@ -1582,15 +1612,24 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, const gl builder.addExecutionMode(shaderEntry, spv::ExecutionModeOutputVertices, glslangIntermediate->getVertices()); break; - case EShLangRayGenNV: - case EShLangIntersectNV: - case EShLangAnyHitNV: - case EShLangClosestHitNV: - case EShLangMissNV: - case EShLangCallableNV: - builder.addCapability(spv::CapabilityRayTracingNV); - builder.addExtension("SPV_NV_ray_tracing"); + case EShLangRayGen: + case EShLangIntersect: + case EShLangAnyHit: + case EShLangClosestHit: + case EShLangMiss: + case EShLangCallable: + { + auto& extensions = glslangIntermediate->getRequestedExtensions(); + if (extensions.find("GL_NV_ray_tracing") == extensions.end()) { + builder.addCapability(spv::CapabilityRayTracingProvisionalKHR); + builder.addExtension("SPV_KHR_ray_tracing"); + } + else { + builder.addCapability(spv::CapabilityRayTracingNV); + builder.addExtension("SPV_NV_ray_tracing"); + } break; + } case EShLangTaskNV: case EShLangMeshNV: builder.addCapability(spv::CapabilityMeshShadingNV); @@ -1599,8 +1638,10 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, const gl glslangIntermediate->getLocalSize(1), glslangIntermediate->getLocalSize(2)); if (glslangIntermediate->getStage() == EShLangMeshNV) { - builder.addExecutionMode(shaderEntry, spv::ExecutionModeOutputVertices, glslangIntermediate->getVertices()); - builder.addExecutionMode(shaderEntry, spv::ExecutionModeOutputPrimitivesNV, glslangIntermediate->getPrimitives()); + builder.addExecutionMode(shaderEntry, spv::ExecutionModeOutputVertices, + glslangIntermediate->getVertices()); + builder.addExecutionMode(shaderEntry, spv::ExecutionModeOutputPrimitivesNV, + glslangIntermediate->getPrimitives()); switch (glslangIntermediate->getOutputPrimitive()) { case glslang::ElgPoints: mode = spv::ExecutionModeOutputPoints; break; @@ -1674,20 +1715,24 @@ void TGlslangToSpvTraverser::visitSymbol(glslang::TIntermSymbol* symbol) spv::Id id = getSymbolId(symbol); if (builder.isPointer(id)) { - // Include all "static use" and "linkage only" interface variables on the OpEntryPoint instruction - // Consider adding to the OpEntryPoint interface list. - // Only looking at structures if they have at least one member. - if (!symbol->getType().isStruct() || symbol->getType().getStruct()->size() > 0) { - spv::StorageClass sc = builder.getStorageClass(id); - // Before SPIR-V 1.4, we only want to include Input and Output. - // Starting with SPIR-V 1.4, we want all globals. 
- if ((glslangIntermediate->getSpv().spv >= glslang::EShTargetSpv_1_4 && sc != spv::StorageClassFunction) || - (sc == spv::StorageClassInput || sc == spv::StorageClassOutput)) { - iOSet.insert(id); + if (!symbol->getType().getQualifier().isParamInput() && + !symbol->getType().getQualifier().isParamOutput()) { + // Include all "static use" and "linkage only" interface variables on the OpEntryPoint instruction + // Consider adding to the OpEntryPoint interface list. + // Only looking at structures if they have at least one member. + if (!symbol->getType().isStruct() || symbol->getType().getStruct()->size() > 0) { + spv::StorageClass sc = builder.getStorageClass(id); + // Before SPIR-V 1.4, we only want to include Input and Output. + // Starting with SPIR-V 1.4, we want all globals. + if ((glslangIntermediate->getSpv().spv >= glslang::EShTargetSpv_1_4 && builder.isGlobalStorage(id)) || + (sc == spv::StorageClassInput || sc == spv::StorageClassOutput)) { + iOSet.insert(id); + } } } - // If the SPIR-V type is required to be different than the AST type, + // If the SPIR-V type is required to be different than the AST type + // (for ex SubgroupMasks or 3x4 ObjectToWorld/WorldToObject matrices), // translate now from the SPIR-V type to the AST type, for the consuming // operation. // Note this turns it from an l-value to an r-value. @@ -1847,7 +1892,8 @@ bool TGlslangToSpvTraverser::visitBinary(glslang::TVisit /* visit */, glslang::T int dummySize; builder.accessChainPushSwizzle(swizzle, convertGlslangToSpvType(node->getLeft()->getType()), TranslateCoherent(node->getLeft()->getType()), - glslangIntermediate->getBaseAlignmentScalar(node->getLeft()->getType(), dummySize)); + glslangIntermediate->getBaseAlignmentScalar( + node->getLeft()->getType(), dummySize)); } else { // Load through a block reference is performed with a dot operator that @@ -1877,7 +1923,9 @@ bool TGlslangToSpvTraverser::visitBinary(glslang::TVisit /* visit */, glslang::T } // normal case for indexing array or structure or block - builder.accessChainPush(builder.makeIntConstant(spvIndex), TranslateCoherent(node->getLeft()->getType()), node->getLeft()->getType().getBufferReferenceAlignment()); + builder.accessChainPush(builder.makeIntConstant(spvIndex), + TranslateCoherent(node->getLeft()->getType()), + node->getLeft()->getType().getBufferReferenceAlignment()); // Add capabilities here for accessing PointSize and clip/cull distance. // We have deferred generation of associated capabilities until now. 
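The hunks above and below lean on glslang's "forced type" mechanism: for a handful of built-ins the type SPIR-V requires for the variable differs from the type the GLSL AST uses, for example the ballot masks (a 64-bit scalar on the GLSL side versus a vector of 32-bit components on the SPIR-V side) and the 3x4 ObjectToWorld/WorldToObject matrices, which are loaded from the non-transposed built-ins and transposed. The standalone C++ sketch below only illustrates the value-level mismatch for the mask case; the struct, helper names, and word order are assumptions made for illustration and are not code from the patch.

    #include <cassert>
    #include <cstdint>

    // A 64-bit ballot mask viewed as two 32-bit words: the kind of
    // scalar <-> vector-of-32-bit-components mismatch the forced-type
    // path has to reconcile. Word order is an assumption of this sketch.
    struct Mask32x2 {
        uint32_t lo; // bits  0..31
        uint32_t hi; // bits 32..63
    };

    static Mask32x2 splitMask(uint64_t mask64) {
        return { static_cast<uint32_t>(mask64 & 0xFFFFFFFFu),
                 static_cast<uint32_t>(mask64 >> 32) };
    }

    static uint64_t joinMask(Mask32x2 parts) {
        return (static_cast<uint64_t>(parts.hi) << 32) | parts.lo;
    }

    int main() {
        const uint64_t mask64 = 0x0000000100000000ull; // only bit 32 set
        const Mask32x2 parts = splitMask(mask64);
        assert(parts.lo == 0u && parts.hi == 1u);
        assert(joinMask(parts) == mask64);
        return 0;
    }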
@@ -1914,9 +1962,11 @@ bool TGlslangToSpvTraverser::visitBinary(glslang::TVisit /* visit */, glslang::T int dummySize; builder.accessChainPushComponent(index, convertGlslangToSpvType(node->getLeft()->getType()), TranslateCoherent(node->getLeft()->getType()), - glslangIntermediate->getBaseAlignmentScalar(node->getLeft()->getType(), dummySize)); + glslangIntermediate->getBaseAlignmentScalar(node->getLeft()->getType(), + dummySize)); } else - builder.accessChainPush(index, TranslateCoherent(node->getLeft()->getType()), node->getLeft()->getType().getBufferReferenceAlignment()); + builder.accessChainPush(index, TranslateCoherent(node->getLeft()->getType()), + node->getLeft()->getType().getBufferReferenceAlignment()); } return false; case glslang::EOpVectorSwizzle: @@ -1927,7 +1977,8 @@ bool TGlslangToSpvTraverser::visitBinary(glslang::TVisit /* visit */, glslang::T int dummySize; builder.accessChainPushSwizzle(swizzle, convertGlslangToSpvType(node->getLeft()->getType()), TranslateCoherent(node->getLeft()->getType()), - glslangIntermediate->getBaseAlignmentScalar(node->getLeft()->getType(), dummySize)); + glslangIntermediate->getBaseAlignmentScalar(node->getLeft()->getType(), + dummySize)); } return false; case glslang::EOpMatrixSwizzle: @@ -1943,7 +1994,8 @@ bool TGlslangToSpvTraverser::visitBinary(glslang::TVisit /* visit */, glslang::T if (isTrivial(node->getRight()->getAsTyped())) break; // handle below as a normal binary operation // otherwise, we need to do dynamic short circuiting on the right operand - spv::Id result = createShortCircuit(node->getOp(), *node->getLeft()->getAsTyped(), *node->getRight()->getAsTyped()); + spv::Id result = createShortCircuit(node->getOp(), *node->getLeft()->getAsTyped(), + *node->getRight()->getAsTyped()); builder.clearAccessChain(); builder.setAccessChainRValue(result); } @@ -1985,16 +2037,16 @@ bool TGlslangToSpvTraverser::visitBinary(glslang::TVisit /* visit */, glslang::T // Figure out what, if any, type changes are needed when accessing a specific built-in. // Returns . // Also see comment for 'forceType', regarding tracking SPIR-V-required types. -std::pair<spv::Id, spv::Id> TGlslangToSpvTraverser::getForcedType(spv::BuiltIn builtIn, +std::pair<spv::Id, spv::Id> TGlslangToSpvTraverser::getForcedType(glslang::TBuiltInVariable glslangBuiltIn, const glslang::TType& glslangType) { - switch(builtIn) + switch(glslangBuiltIn) { - case spv::BuiltInSubgroupEqMask: - case spv::BuiltInSubgroupGeMask: - case spv::BuiltInSubgroupGtMask: - case spv::BuiltInSubgroupLeMask: - case spv::BuiltInSubgroupLtMask: { + case glslang::EbvSubGroupEqMask: + case glslang::EbvSubGroupGeMask: + case glslang::EbvSubGroupGtMask: + case glslang::EbvSubGroupLeMask: + case glslang::EbvSubGroupLtMask: { // these require changing a 64-bit scaler -> a vector of 32-bit components if (glslangType.isVector()) break; @@ -2002,6 +2054,15 @@ std::pair<spv::Id, spv::Id> TGlslangToSpvTraverser::getForcedType(spv::BuiltIn b builder.makeUintType(64)); return ret; } + // There are no SPIR-V builtins defined for these and map onto original non-transposed + // builtins.
During visitBinary we insert a transpose + case glslang::EbvWorldToObject3x4: + case glslang::EbvObjectToWorld3x4: { + spv::Id mat43 = builder.makeMatrixType(builder.makeFloatType(32), 4, 3); + spv::Id mat34 = builder.makeMatrixType(builder.makeFloatType(32), 3, 4); + std::pair ret(mat43, mat34); + return ret; + } default: break; } @@ -2040,7 +2101,15 @@ spv::Id TGlslangToSpvTraverser::translateForcedType(spv::Id object) } else { logger->missingFunctionality("forcing 32-bit vector type to non 64-bit scalar"); } - } else { + } else if (builder.isMatrixType(objectTypeId)) { + // There are no SPIR-V builtins defined for 3x4 variants of ObjectToWorld/WorldToObject + // and we insert a transpose after loading the original non-transposed builtins + builder.clearAccessChain(); + builder.setAccessChainLValue(object); + object = builder.accessChainLoad(spv::NoPrecision, spv::DecorationMax, objectTypeId); + return builder.createUnaryOp(spv::OpTranspose, desiredTypeId, object); + + } else { logger->missingFunctionality("forcing non 32-bit vector type"); } @@ -2091,7 +2160,8 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI } else { glslang::TIntermTyped* block = node->getOperand()->getAsBinaryNode()->getLeft(); block->traverse(this); - unsigned int member = node->getOperand()->getAsBinaryNode()->getRight()->getAsConstantUnion()->getConstArray()[0].getUConst(); + unsigned int member = node->getOperand()->getAsBinaryNode()->getRight()->getAsConstantUnion() + ->getConstArray()[0].getUConst(); length = builder.createArrayLength(builder.accessChainGetLValue(), member); } @@ -2117,7 +2187,8 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI // Does it need a swizzle inversion? If so, evaluation is inverted; // operate first on the swizzle base, then apply the swizzle. spv::Id invertedType = spv::NoType; - auto resultType = [&invertedType, &node, this](){ return invertedType != spv::NoType ? invertedType : convertGlslangToSpvType(node->getType()); }; + auto resultType = [&invertedType, &node, this](){ return invertedType != spv::NoType ? + invertedType : convertGlslangToSpvType(node->getType()); }; if (node->getOp() == glslang::EOpInterpolateAtCentroid) invertedType = getInvertedSwizzleType(*node->getOperand()); @@ -2138,7 +2209,15 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI if (node->getOp() == glslang::EOpAtomicCounterIncrement || node->getOp() == glslang::EOpAtomicCounterDecrement || node->getOp() == glslang::EOpAtomicCounter || - node->getOp() == glslang::EOpInterpolateAtCentroid) { + node->getOp() == glslang::EOpInterpolateAtCentroid || + node->getOp() == glslang::EOpRayQueryProceed || + node->getOp() == glslang::EOpRayQueryGetRayTMin || + node->getOp() == glslang::EOpRayQueryGetRayFlags || + node->getOp() == glslang::EOpRayQueryGetWorldRayOrigin || + node->getOp() == glslang::EOpRayQueryGetWorldRayDirection || + node->getOp() == glslang::EOpRayQueryGetIntersectionCandidateAABBOpaque || + node->getOp() == glslang::EOpRayQueryTerminate || + node->getOp() == glslang::EOpRayQueryConfirmIntersection) { operand = builder.accessChainGetLValue(); // Special case l-value operands lvalueCoherentFlags = builder.getAccessChain().coherentFlags; lvalueCoherentFlags |= TranslateCoherent(operandNode->getAsTyped()->getType()); @@ -2154,11 +2233,13 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI // it could be a conversion if (! 
result) - result = createConversion(node->getOp(), decorations, resultType(), operand, node->getOperand()->getBasicType()); + result = createConversion(node->getOp(), decorations, resultType(), operand, + node->getOperand()->getBasicType()); // if not, then possibly an operation if (! result) - result = createUnaryOperation(node->getOp(), decorations, resultType(), operand, node->getOperand()->getBasicType(), lvalueCoherentFlags); + result = createUnaryOperation(node->getOp(), decorations, resultType(), operand, + node->getOperand()->getBasicType(), lvalueCoherentFlags); if (result) { if (invertedType) { @@ -2229,6 +2310,12 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI case glslang::EOpEndStreamPrimitive: builder.createNoResultOp(spv::OpEndStreamPrimitive, operand); return false; + case glslang::EOpRayQueryTerminate: + builder.createNoResultOp(spv::OpRayQueryTerminateKHR, operand); + return false; + case glslang::EOpRayQueryConfirmIntersection: + builder.createNoResultOp(spv::OpRayQueryConfirmIntersectionKHR, operand); + return false; #endif default: @@ -2251,7 +2338,8 @@ spv::Id TGlslangToSpvTraverser::createCompositeConstruct(spv::Id resultTypeId, s std::vector<spv::Id> rTypeConstituents; int numrTypeConstituents = builder.getNumTypeConstituents(rType); for (int i = 0; i < numrTypeConstituents; ++i) { - rTypeConstituents.push_back(builder.createCompositeExtract(constituent, builder.getContainedTypeId(rType, i), i)); + rTypeConstituents.push_back(builder.createCompositeExtract(constituent, + builder.getContainedTypeId(rType, i), i)); } constituents[c] = createCompositeConstruct(lType, rTypeConstituents); } else { @@ -2277,8 +2365,14 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt spec_constant_op_mode_setter.turnOnSpecConstantOpMode(); spv::Id result = spv::NoResult; - spv::Id invertedType = spv::NoType; // to use to override the natural type of the node - auto resultType = [&invertedType, &node, this](){ return invertedType != spv::NoType ? invertedType : convertGlslangToSpvType(node->getType()); }; + spv::Id invertedType = spv::NoType; // to use to override the natural type of the node + std::vector<spv::Builder::AccessChain> complexLvalues; // for holding swizzling l-values too complex for + // SPIR-V, for an out parameter + std::vector<spv::Id> temporaryLvalues; // temporaries to pass, as proxies for complexLValues + + auto resultType = [&invertedType, &node, this](){ return invertedType != spv::NoType ?
+ invertedType : + convertGlslangToSpvType(node->getType()); }; // try texturing result = createImageTextureFunctionCall(node); @@ -2382,7 +2476,6 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt builder.setLine(node->getLoc().line, node->getLoc().getFilename()); if (node->isUserDefined()) result = handleUserFunctionCall(node); - // assert(result); // this can happen for bad shaders because the call graph completeness checking is not yet done if (result) { builder.clearAccessChain(); builder.setAccessChainRValue(result); @@ -2633,13 +2726,43 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt binOp = node->getOp(); break; - case glslang::EOpIgnoreIntersectionNV: - case glslang::EOpTerminateRayNV: - case glslang::EOpTraceNV: - case glslang::EOpExecuteCallableNV: + case glslang::EOpIgnoreIntersection: + case glslang::EOpTerminateRay: + case glslang::EOpTrace: + case glslang::EOpExecuteCallable: case glslang::EOpWritePackedPrimitiveIndices4x8NV: noReturnValue = true; break; + case glslang::EOpRayQueryInitialize: + case glslang::EOpRayQueryTerminate: + case glslang::EOpRayQueryGenerateIntersection: + case glslang::EOpRayQueryConfirmIntersection: + builder.addExtension("SPV_KHR_ray_query"); + builder.addCapability(spv::CapabilityRayQueryProvisionalKHR); + noReturnValue = true; + break; + case glslang::EOpRayQueryProceed: + case glslang::EOpRayQueryGetIntersectionType: + case glslang::EOpRayQueryGetRayTMin: + case glslang::EOpRayQueryGetRayFlags: + case glslang::EOpRayQueryGetIntersectionT: + case glslang::EOpRayQueryGetIntersectionInstanceCustomIndex: + case glslang::EOpRayQueryGetIntersectionInstanceId: + case glslang::EOpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffset: + case glslang::EOpRayQueryGetIntersectionGeometryIndex: + case glslang::EOpRayQueryGetIntersectionPrimitiveIndex: + case glslang::EOpRayQueryGetIntersectionBarycentrics: + case glslang::EOpRayQueryGetIntersectionFrontFace: + case glslang::EOpRayQueryGetIntersectionCandidateAABBOpaque: + case glslang::EOpRayQueryGetIntersectionObjectRayDirection: + case glslang::EOpRayQueryGetIntersectionObjectRayOrigin: + case glslang::EOpRayQueryGetWorldRayDirection: + case glslang::EOpRayQueryGetWorldRayOrigin: + case glslang::EOpRayQueryGetIntersectionObjectToWorld: + case glslang::EOpRayQueryGetIntersectionWorldToObject: + builder.addExtension("SPV_KHR_ray_query"); + builder.addCapability(spv::CapabilityRayQueryProvisionalKHR); + break; case glslang::EOpCooperativeMatrixLoad: case glslang::EOpCooperativeMatrixStore: noReturnValue = true; @@ -2651,6 +2774,10 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt break; #endif + case glslang::EOpDebugPrintf: + noReturnValue = true; + break; + default: break; } @@ -2702,6 +2829,28 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt lvalue = true; break; + case glslang::EOpRayQueryInitialize: + case glslang::EOpRayQueryTerminate: + case glslang::EOpRayQueryConfirmIntersection: + case glslang::EOpRayQueryProceed: + case glslang::EOpRayQueryGenerateIntersection: + case glslang::EOpRayQueryGetIntersectionType: + case glslang::EOpRayQueryGetIntersectionT: + case glslang::EOpRayQueryGetIntersectionInstanceCustomIndex: + case glslang::EOpRayQueryGetIntersectionInstanceId: + case glslang::EOpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffset: + case glslang::EOpRayQueryGetIntersectionGeometryIndex: + case 
glslang::EOpRayQueryGetIntersectionPrimitiveIndex: + case glslang::EOpRayQueryGetIntersectionBarycentrics: + case glslang::EOpRayQueryGetIntersectionFrontFace: + case glslang::EOpRayQueryGetIntersectionObjectRayDirection: + case glslang::EOpRayQueryGetIntersectionObjectRayOrigin: + case glslang::EOpRayQueryGetIntersectionObjectToWorld: + case glslang::EOpRayQueryGetIntersectionWorldToObject: + if (arg == 0) + lvalue = true; + break; + case glslang::EOpAtomicAdd: case glslang::EOpAtomicMin: case glslang::EOpAtomicMax: @@ -2727,9 +2876,14 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt // Does it need a swizzle inversion? If so, evaluation is inverted; // operate first on the swizzle base, then apply the swizzle. + // That is, we transform + // + // interpolate(v.zy) -> interpolate(v).zy + // if (glslangOperands[0]->getAsOperator() && glslangOperands[0]->getAsOperator()->getOp() == glslang::EOpVectorSwizzle) - invertedType = convertGlslangToSpvType(glslangOperands[0]->getAsBinaryNode()->getLeft()->getType()); + invertedType = convertGlslangToSpvType( + glslangOperands[0]->getAsBinaryNode()->getLeft()->getType()); } break; case glslang::EOpAtomicLoad: @@ -2789,8 +2943,9 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt builder.setAccessChain(save); // Point to the first element of the array. - builder.accessChainPush(elementId, TranslateCoherent(glslangOperands[arg]->getAsTyped()->getType()), - glslangOperands[arg]->getAsTyped()->getType().getBufferReferenceAlignment()); + builder.accessChainPush(elementId, + TranslateCoherent(glslangOperands[arg]->getAsTyped()->getType()), + glslangOperands[arg]->getAsTyped()->getType().getBufferReferenceAlignment()); spv::Builder::AccessChain::CoherentFlags coherentFlags = builder.getAccessChain().coherentFlags; unsigned int alignment = builder.getAccessChain().alignment; @@ -2800,7 +2955,8 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt memoryAccess &= ~spv::MemoryAccessMakePointerAvailableKHRMask; if (node->getOp() == glslang::EOpCooperativeMatrixStore) memoryAccess &= ~spv::MemoryAccessMakePointerVisibleKHRMask; - if (builder.getStorageClass(builder.getAccessChain().base) == spv::StorageClassPhysicalStorageBufferEXT) { + if (builder.getStorageClass(builder.getAccessChain().base) == + spv::StorageClassPhysicalStorageBufferEXT) { memoryAccess = (spv::MemoryAccessMask)(memoryAccess | spv::MemoryAccessAlignedMask); } @@ -2810,8 +2966,10 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt memoryAccessOperands.push_back(spv::IdImmediate(false, alignment)); } - if (memoryAccess & (spv::MemoryAccessMakePointerAvailableKHRMask | spv::MemoryAccessMakePointerVisibleKHRMask)) { - memoryAccessOperands.push_back(spv::IdImmediate(true, builder.makeUintConstant(TranslateMemoryScope(coherentFlags)))); + if (memoryAccess & + (spv::MemoryAccessMakePointerAvailableKHRMask | spv::MemoryAccessMakePointerVisibleKHRMask)) { + memoryAccessOperands.push_back(spv::IdImmediate(true, + builder.makeUintConstant(TranslateMemoryScope(coherentFlags)))); } } else if (arg == 2) { continue; @@ -2819,13 +2977,48 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt } #endif + // for l-values, pass the address, for r-values, pass the value if (lvalue) { - operands.push_back(builder.accessChainGetLValue()); + if (invertedType == spv::NoType && !builder.isSpvLvalue()) { + // SPIR-V cannot represent an l-value containing a 
swizzle that doesn't + // reduce to a simple access chain. So, we need a temporary vector to + // receive the result, and must later swizzle that into the original + // l-value. + complexLvalues.push_back(builder.getAccessChain()); + temporaryLvalues.push_back(builder.createVariable( + spv::NoPrecision, spv::StorageClassFunction, + builder.accessChainGetInferredType(), "swizzleTemp")); + operands.push_back(temporaryLvalues.back()); + } else { + operands.push_back(builder.accessChainGetLValue()); + } lvalueCoherentFlags = builder.getAccessChain().coherentFlags; lvalueCoherentFlags |= TranslateCoherent(glslangOperands[arg]->getAsTyped()->getType()); } else { builder.setLine(node->getLoc().line, node->getLoc().getFilename()); - operands.push_back(accessChainLoad(glslangOperands[arg]->getAsTyped()->getType())); + glslang::TOperator glslangOp = node->getOp(); + if (arg == 1 && + (glslangOp == glslang::EOpRayQueryGetIntersectionType || + glslangOp == glslang::EOpRayQueryGetIntersectionT || + glslangOp == glslang::EOpRayQueryGetIntersectionInstanceCustomIndex || + glslangOp == glslang::EOpRayQueryGetIntersectionInstanceId || + glslangOp == glslang::EOpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffset || + glslangOp == glslang::EOpRayQueryGetIntersectionGeometryIndex || + glslangOp == glslang::EOpRayQueryGetIntersectionPrimitiveIndex || + glslangOp == glslang::EOpRayQueryGetIntersectionBarycentrics || + glslangOp == glslang::EOpRayQueryGetIntersectionFrontFace || + glslangOp == glslang::EOpRayQueryGetIntersectionObjectRayDirection || + glslangOp == glslang::EOpRayQueryGetIntersectionObjectRayOrigin || + glslangOp == glslang::EOpRayQueryGetIntersectionObjectToWorld || + glslangOp == glslang::EOpRayQueryGetIntersectionWorldToObject + )) { + bool cond = glslangOperands[arg]->getAsConstantUnion()->getConstArray()[0].getBConst(); + operands.push_back(builder.makeIntConstant(cond ? 1 : 0)); + } + else { + operands.push_back(accessChainLoad(glslangOperands[arg]->getAsTyped()->getType())); + } + } } @@ -2861,7 +3054,14 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt #endif if (atomic) { // Handle all atomics - result = createAtomicOperation(node->getOp(), precision, resultType(), operands, node->getBasicType(), lvalueCoherentFlags); + result = createAtomicOperation(node->getOp(), precision, resultType(), operands, node->getBasicType(), + lvalueCoherentFlags); + } else if (node->getOp() == glslang::EOpDebugPrintf) { + if (!nonSemanticDebugPrintf) { + nonSemanticDebugPrintf = builder.import("NonSemantic.DebugPrintf"); + } + result = builder.createBuiltinCall(builder.makeVoidType(), nonSemanticDebugPrintf, spv::NonSemanticDebugPrintfDebugPrintf, operands); + builder.addExtension(spv::E_SPV_KHR_non_semantic_info); } else { // Pass through to generic operations. 
switch (glslangOperands.size()) { @@ -2883,8 +3083,14 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt result = createMiscOperation(node->getOp(), precision, resultType(), operands, node->getBasicType()); break; } - if (invertedType) + + if (invertedType != spv::NoResult) result = createInvertedSwizzle(precision, *glslangOperands[0]->getAsBinaryNode(), result); + + for (unsigned int i = 0; i < temporaryLvalues.size(); ++i) { + builder.setAccessChain(complexLvalues[i]); + builder.accessChainStore(builder.createLoad(temporaryLvalues[i], spv::NoPrecision)); + } } if (noReturnValue) @@ -3000,7 +3206,8 @@ bool TGlslangToSpvTraverser::visitSelection(glslang::TVisit /* visit */, glslang } else { // We need control flow to select the result. // TODO: Once SPIR-V OpSelect allows arbitrary types, eliminate this path. - result = builder.createVariable(spv::StorageClassFunction, convertGlslangToSpvType(node->getType())); + result = builder.createVariable(TranslatePrecisionDecoration(node->getType()), + spv::StorageClassFunction, convertGlslangToSpvType(node->getType())); // Selection control: const spv::SelectionControlMask control = TranslateSelectionControl(*node); @@ -3025,8 +3232,10 @@ bool TGlslangToSpvTraverser::visitSelection(glslang::TVisit /* visit */, glslang // Execute the one side needed, as per the condition const auto executeOneSide = [&]() { // Always emit control flow. - if (node->getBasicType() != glslang::EbtVoid) - result = builder.createVariable(spv::StorageClassFunction, convertGlslangToSpvType(node->getType())); + if (node->getBasicType() != glslang::EbtVoid) { + result = builder.createVariable(TranslatePrecisionDecoration(node->getType()), spv::StorageClassFunction, + convertGlslangToSpvType(node->getType())); + } // Selection control: const spv::SelectionControlMask control = TranslateSelectionControl(*node); @@ -3091,7 +3300,8 @@ bool TGlslangToSpvTraverser::visitSwitch(glslang::TVisit /* visit */, glslang::T defaultSegment = (int)codeSegments.size(); else if (child->getAsBranchNode() && child->getAsBranchNode()->getFlowOp() == glslang::EOpCase) { valueIndexToSegment[caseValues.size()] = (int)codeSegments.size(); - caseValues.push_back(child->getAsBranchNode()->getExpression()->getAsConstantUnion()->getConstArray()[0].getIConst()); + caseValues.push_back(child->getAsBranchNode()->getExpression()->getAsConstantUnion() + ->getConstArray()[0].getIConst()); } else codeSegments.push_back(child); } @@ -3104,7 +3314,8 @@ bool TGlslangToSpvTraverser::visitSwitch(glslang::TVisit /* visit */, glslang::T // make the switch statement std::vector segmentBlocks; // returned, as the blocks allocated in the call - builder.makeSwitch(selector, control, (int)codeSegments.size(), caseValues, valueIndexToSegment, defaultSegment, segmentBlocks); + builder.makeSwitch(selector, control, (int)codeSegments.size(), caseValues, valueIndexToSegment, defaultSegment, + segmentBlocks); // emit all the code in the segments breakForLoop.push(false); @@ -3221,15 +3432,17 @@ bool TGlslangToSpvTraverser::visitBranch(glslang::TVisit /* visit */, glslang::T builder.createLoopContinue(); break; case glslang::EOpReturn: - if (node->getExpression()) { + if (node->getExpression() != nullptr) { const glslang::TType& glslangReturnType = node->getExpression()->getType(); spv::Id returnId = accessChainLoad(glslangReturnType); - if (builder.getTypeId(returnId) != currentFunction->getReturnType()) { + if (builder.getTypeId(returnId) != currentFunction->getReturnType() || + 
TranslatePrecisionDecoration(glslangReturnType) != currentFunction->getReturnPrecision()) { builder.clearAccessChain(); - spv::Id copyId = builder.createVariable(spv::StorageClassFunction, currentFunction->getReturnType()); + spv::Id copyId = builder.createVariable(currentFunction->getReturnPrecision(), + spv::StorageClassFunction, currentFunction->getReturnType()); builder.setAccessChainLValue(copyId); multiTypeStore(glslangReturnType, returnId); - returnId = builder.createLoad(copyId); + returnId = builder.createLoad(copyId, currentFunction->getReturnPrecision()); } builder.makeReturn(false, returnId); } else @@ -3325,7 +3538,18 @@ spv::Id TGlslangToSpvTraverser::createSpvVariable(const glslang::TIntermSymbol* if (glslang::IsAnonymous(name)) name = ""; - return builder.createVariable(storageClass, spvType, name); + spv::Id initializer = spv::NoResult; + + if (node->getType().getQualifier().storage == glslang::EvqUniform && + !node->getConstArray().empty()) { + int nextConst = 0; + initializer = createSpvConstantFromConstUnionArray(node->getType(), + node->getConstArray(), + nextConst, + false /* specConst */); + } + + return builder.createVariable(spv::NoPrecision, storageClass, spvType, name, initializer); } // Return type Id of the sampled type. @@ -3361,7 +3585,8 @@ spv::Id TGlslangToSpvTraverser::getInvertedSwizzleType(const glslang::TIntermTyp // When inverting a swizzle with a parent op, this function // will apply the swizzle operation to a completed parent operation. -spv::Id TGlslangToSpvTraverser::createInvertedSwizzle(spv::Decoration precision, const glslang::TIntermTyped& node, spv::Id parentResult) +spv::Id TGlslangToSpvTraverser::createInvertedSwizzle(spv::Decoration precision, const glslang::TIntermTyped& node, + spv::Id parentResult) { std::vector swizzle; convertSwizzle(*node.getAsBinaryNode()->getRight()->getAsAggregate(), swizzle); @@ -3444,8 +3669,11 @@ spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& ty builder.addCapability(spv::CapabilityAtomicStorage); spvType = builder.makeUintType(32); break; - case glslang::EbtAccStructNV: - spvType = builder.makeAccelerationStructureNVType(); + case glslang::EbtAccStruct: + spvType = builder.makeAccelerationStructureType(); + break; + case glslang::EbtRayQuery: + spvType = builder.makeRayQueryType(); break; case glslang::EbtReference: { @@ -3501,6 +3729,9 @@ spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& ty spvType = convertGlslangStructToSpvType(type, glslangMembers, explicitLayout, qualifier); } break; + case glslang::EbtString: + // no type used for OpString + return 0; default: assert(0); break; @@ -3626,7 +3857,8 @@ spv::Id TGlslangToSpvTraverser::convertGlslangStructToSpvType(const glslang::TTy { // Create a vector of struct types for SPIR-V to consume std::vector spvMembers; - int memberDelta = 0; // how much the member's index changes from glslang to SPIR-V, normally 0, except sometimes for blocks + int memberDelta = 0; // how much the member's index changes from glslang to SPIR-V, normally 0, + // except sometimes for blocks std::vector > deferredForwardPointers; for (int i = 0; i < (int)glslangMembers->size(); i++) { glslang::TType& glslangMember = *(*glslangMembers)[i].type; @@ -3662,10 +3894,12 @@ spv::Id TGlslangToSpvTraverser::convertGlslangStructToSpvType(const glslang::TTy deferredForwardPointers.push_back(std::make_pair(&glslangMember, memberQualifier)); } spvMembers.push_back( - convertGlslangToSpvType(glslangMember, explicitLayout, memberQualifier, 
lastBufferBlockMember, true)); + convertGlslangToSpvType(glslangMember, explicitLayout, memberQualifier, lastBufferBlockMember, + true)); } else { spvMembers.push_back( - convertGlslangToSpvType(glslangMember, explicitLayout, memberQualifier, lastBufferBlockMember, false)); + convertGlslangToSpvType(glslangMember, explicitLayout, memberQualifier, lastBufferBlockMember, + false)); } } } @@ -3853,11 +4087,11 @@ spv::Id TGlslangToSpvTraverser::accessChainLoad(const glslang::TType& type) alignment |= type.getBufferReferenceAlignment(); spv::Id loadedId = builder.accessChainLoad(TranslatePrecisionDecoration(type), - TranslateNonUniformDecoration(type.getQualifier()), - nominalTypeId, - spv::MemoryAccessMask(TranslateMemoryAccess(coherentFlags) & ~spv::MemoryAccessMakePointerAvailableKHRMask), - TranslateMemoryScope(coherentFlags), - alignment); + TranslateNonUniformDecoration(type.getQualifier()), + nominalTypeId, + spv::MemoryAccessMask(TranslateMemoryAccess(coherentFlags) & ~spv::MemoryAccessMakePointerAvailableKHRMask), + TranslateMemoryScope(coherentFlags), + alignment); // Need to convert to abstract types when necessary if (type.getBasicType() == glslang::EbtBool) { @@ -3871,7 +4105,8 @@ spv::Id TGlslangToSpvTraverser::accessChainLoad(const glslang::TType& type) int vecSize = builder.getNumTypeComponents(nominalTypeId); spv::Id bvecType = builder.makeVectorType(builder.makeBoolType(), vecSize); if (nominalTypeId != bvecType) - loadedId = builder.createBinOp(spv::OpINotEqual, bvecType, loadedId, makeSmearedConstant(builder.makeUintConstant(0), vecSize)); + loadedId = builder.createBinOp(spv::OpINotEqual, bvecType, loadedId, + makeSmearedConstant(builder.makeUintConstant(0), vecSize)); } } @@ -3920,7 +4155,8 @@ void TGlslangToSpvTraverser::accessChainStore(const glslang::TType& type, spv::I alignment |= type.getBufferReferenceAlignment(); builder.accessChainStore(rvalue, - spv::MemoryAccessMask(TranslateMemoryAccess(coherentFlags) & ~spv::MemoryAccessMakePointerVisibleKHRMask), + spv::MemoryAccessMask(TranslateMemoryAccess(coherentFlags) & + ~spv::MemoryAccessMakePointerVisibleKHRMask), TranslateMemoryScope(coherentFlags), alignment); } @@ -3981,7 +4217,8 @@ void TGlslangToSpvTraverser::multiTypeStore(const glslang::TType& type, spv::Id // set up the target storage builder.clearAccessChain(); builder.setAccessChainLValue(lValue); - builder.accessChainPush(builder.makeIntConstant(index), TranslateCoherent(type), type.getBufferReferenceAlignment()); + builder.accessChainPush(builder.makeIntConstant(index), TranslateCoherent(type), + type.getBufferReferenceAlignment()); // store the member multiTypeStore(glslangElementType, elementRValue); @@ -4001,7 +4238,8 @@ void TGlslangToSpvTraverser::multiTypeStore(const glslang::TType& type, spv::Id // set up the target storage builder.clearAccessChain(); builder.setAccessChainLValue(lValue); - builder.accessChainPush(builder.makeIntConstant(m), TranslateCoherent(type), type.getBufferReferenceAlignment()); + builder.accessChainPush(builder.makeIntConstant(m), TranslateCoherent(type), + type.getBufferReferenceAlignment()); // store the member multiTypeStore(glslangMemberType, memberRValue); @@ -4036,18 +4274,21 @@ glslang::TLayoutPacking TGlslangToSpvTraverser::getExplicitLayout(const glslang: } // Given an array type, returns the integer stride required for that array -int TGlslangToSpvTraverser::getArrayStride(const glslang::TType& arrayType, glslang::TLayoutPacking explicitLayout, glslang::TLayoutMatrix matrixLayout) +int 
TGlslangToSpvTraverser::getArrayStride(const glslang::TType& arrayType, glslang::TLayoutPacking explicitLayout, + glslang::TLayoutMatrix matrixLayout) { int size; int stride; - glslangIntermediate->getMemberAlignment(arrayType, size, stride, explicitLayout, matrixLayout == glslang::ElmRowMajor); + glslangIntermediate->getMemberAlignment(arrayType, size, stride, explicitLayout, + matrixLayout == glslang::ElmRowMajor); return stride; } // Given a matrix type, or array (of array) of matrixes type, returns the integer stride required for that matrix // when used as a member of an interface block -int TGlslangToSpvTraverser::getMatrixStride(const glslang::TType& matrixType, glslang::TLayoutPacking explicitLayout, glslang::TLayoutMatrix matrixLayout) +int TGlslangToSpvTraverser::getMatrixStride(const glslang::TType& matrixType, glslang::TLayoutPacking explicitLayout, + glslang::TLayoutMatrix matrixLayout) { glslang::TType elementType; elementType.shallowCopy(matrixType); @@ -4055,7 +4296,8 @@ int TGlslangToSpvTraverser::getMatrixStride(const glslang::TType& matrixType, gl int size; int stride; - glslangIntermediate->getMemberAlignment(elementType, size, stride, explicitLayout, matrixLayout == glslang::ElmRowMajor); + glslangIntermediate->getMemberAlignment(elementType, size, stride, explicitLayout, + matrixLayout == glslang::ElmRowMajor); return stride; } @@ -4066,8 +4308,8 @@ int TGlslangToSpvTraverser::getMatrixStride(const glslang::TType& matrixType, gl // 'currentOffset' should be passed in already initialized, ready to modify, and reflecting // the migration of data from nextOffset -> currentOffset. It should be -1 on the first call. // -1 means a non-forced member offset (no decoration needed). -void TGlslangToSpvTraverser::updateMemberOffset(const glslang::TType& structType, const glslang::TType& memberType, int& currentOffset, int& nextOffset, - glslang::TLayoutPacking explicitLayout, glslang::TLayoutMatrix matrixLayout) +void TGlslangToSpvTraverser::updateMemberOffset(const glslang::TType& structType, const glslang::TType& memberType, + int& currentOffset, int& nextOffset, glslang::TLayoutPacking explicitLayout, glslang::TLayoutMatrix matrixLayout) { // this will get a positive value when deemed necessary nextOffset = -1; @@ -4097,7 +4339,8 @@ void TGlslangToSpvTraverser::updateMemberOffset(const glslang::TType& structType int memberSize; int dummyStride; - int memberAlignment = glslangIntermediate->getMemberAlignment(memberType, memberSize, dummyStride, explicitLayout, matrixLayout == glslang::ElmRowMajor); + int memberAlignment = glslangIntermediate->getMemberAlignment(memberType, memberSize, dummyStride, explicitLayout, + matrixLayout == glslang::ElmRowMajor); // Adjust alignment for HLSL rules // TODO: make this consistent in early phases of code: @@ -4116,7 +4359,8 @@ void TGlslangToSpvTraverser::updateMemberOffset(const glslang::TType& structType glslang::RoundToPow2(currentOffset, memberAlignment); // Bump up to vec4 if there is a bad straddle - if (explicitLayout != glslang::ElpScalar && glslangIntermediate->improperStraddle(memberType, memberSize, currentOffset)) + if (explicitLayout != glslang::ElpScalar && glslangIntermediate->improperStraddle(memberType, memberSize, + currentOffset)) glslang::RoundToPow2(currentOffset, 16); nextOffset = currentOffset + memberSize; @@ -4169,8 +4413,10 @@ bool TGlslangToSpvTraverser::writableParam(glslang::TStorageQualifier qualifier) assert(qualifier == glslang::EvqIn || qualifier == glslang::EvqOut || qualifier == glslang::EvqInOut || + 
qualifier == glslang::EvqUniform || qualifier == glslang::EvqConstReadOnly); - return qualifier != glslang::EvqConstReadOnly; + return qualifier != glslang::EvqConstReadOnly && + qualifier != glslang::EvqUniform; } // Is parameter pass-by-original? @@ -4188,7 +4434,8 @@ bool TGlslangToSpvTraverser::originalParam(glslang::TStorageQualifier qualifier, // Make all the functions, skeletally, without actually visiting their bodies. void TGlslangToSpvTraverser::makeFunctions(const glslang::TIntermSequence& glslFunctions) { - const auto getParamDecorations = [&](std::vector<spv::Decoration>& decorations, const glslang::TType& type, bool useVulkanMemoryModel) { + const auto getParamDecorations = [&](std::vector<spv::Decoration>& decorations, const glslang::TType& type, + bool useVulkanMemoryModel) { spv::Decoration paramPrecision = TranslatePrecisionDecoration(type); if (paramPrecision != spv::NoPrecision) decorations.push_back(paramPrecision); @@ -4286,7 +4533,8 @@ void TGlslangToSpvTraverser::makeGlobalInitializers(const glslang::TIntermSequen builder.setBuildPoint(shaderEntry->getLastBlock()); for (int i = 0; i < (int)initializers.size(); ++i) { glslang::TIntermAggregate* initializer = initializers[i]->getAsAggregate(); - if (initializer && initializer->getOp() != glslang::EOpFunction && initializer->getOp() != glslang::EOpLinkerObjects) { + if (initializer && initializer->getOp() != glslang::EOpFunction && initializer->getOp() != + glslang::EOpLinkerObjects) { // We're on a top-level node that's not a function. Treat as an initializer, whose // code goes into the beginning of the entry point. @@ -4314,7 +4562,8 @@ void TGlslangToSpvTraverser::handleFunctionEntry(const glslang::TIntermAggregate builder.setBuildPoint(functionBlock); } -void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate& node, std::vector<spv::Id>& arguments, spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags) +void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate& node, std::vector<spv::Id>& arguments, + spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags) { const glslang::TIntermSequence& glslangArguments = node.getSequence(); @@ -4327,7 +4576,8 @@ void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate& sampler = glslangArguments[0]->getAsTyped()->getType().getSampler(); cubeCompare = sampler.dim == glslang::EsdCube && sampler.arrayed && sampler.shadow; #ifndef GLSLANG_WEB - f16ShadowCompare = sampler.shadow && glslangArguments[1]->getAsTyped()->getType().getBasicType() == glslang::EbtFloat16; + f16ShadowCompare = sampler.shadow && + glslangArguments[1]->getAsTyped()->getType().getBasicType() == glslang::EbtFloat16; #endif } @@ -4490,7 +4740,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO translateArguments(*node->getAsAggregate(), arguments, lvalueCoherentFlags); else translateArguments(*node->getAsUnaryNode(), arguments); - spv::Decoration precision = TranslatePrecisionDecoration(node->getOperationPrecision()); + spv::Decoration precision = TranslatePrecisionDecoration(node->getType()); spv::Builder::TextureParameters params = { }; params.sampler = arguments[0]; @@ -4703,7 +4953,8 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO operands.push_back(imageOperand); } if (mask & spv::ImageOperandsMakeTexelVisibleKHRMask) { - spv::IdImmediate imageOperand = { true, 
builder.makeUintConstant(TranslateMemoryScope( + TranslateCoherent(imageType))) }; operands.push_back(imageOperand); } @@ -4731,18 +4982,22 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO // imageAtomicStore has a void return type so base the pointer type on // the type of the value operand. if (node->getOp() == glslang::EOpImageAtomicStore) { - resultTypeId = builder.makePointer(spv::StorageClassImage, builder.getTypeId(operands[2].word)); + resultTypeId = builder.makePointer(spv::StorageClassImage, builder.getTypeId(*opIt)); } else { resultTypeId = builder.makePointer(spv::StorageClassImage, resultType()); } spv::Id pointer = builder.createOp(spv::OpImageTexelPointer, resultTypeId, operands); + if (imageType.getQualifier().nonUniform) { + builder.addDecoration(pointer, spv::DecorationNonUniformEXT); + } std::vector operands; operands.push_back(pointer); for (; opIt != arguments.end(); ++opIt) operands.push_back(*opIt); - return createAtomicOperation(node->getOp(), precision, resultType(), operands, node->getBasicType(), lvalueCoherentFlags); + return createAtomicOperation(node->getOp(), precision, resultType(), operands, node->getBasicType(), + lvalueCoherentFlags); } } @@ -4767,7 +5022,8 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO std::vector comps; comps.push_back(zero); comps.push_back(zero); - operands.push_back(builder.makeCompositeConstant(builder.makeVectorType(builder.makeIntType(32), 2), comps)); + operands.push_back(builder.makeCompositeConstant( + builder.makeVectorType(builder.makeIntType(32), 2), comps)); } for (; opIt != arguments.end(); ++opIt) @@ -4856,7 +5112,8 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO else dRefComp = builder.getNumComponents(params.coords) - 1; indexes.push_back(dRefComp); - params.Dref = builder.createCompositeExtract(params.coords, builder.getScalarTypeId(builder.getTypeId(params.coords)), indexes); + params.Dref = builder.createCompositeExtract(params.coords, + builder.getScalarTypeId(builder.getTypeId(params.coords)), indexes); } // lod @@ -4981,7 +5238,8 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO flags.clear(); builder.accessChainPush(builder.makeIntConstant(i), flags, 0); - builder.accessChainStore(builder.createCompositeExtract(res, builder.getContainedTypeId(resType, i+1), i+1)); + builder.accessChainStore(builder.createCompositeExtract(res, builder.getContainedTypeId(resType, i+1), + i+1)); } return builder.createCompositeExtract(res, resultType(), 0); } @@ -5004,10 +5262,9 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO // copy the projective coordinate if we have to if (projTargetComp != projSourceComp) { spv::Id projComp = builder.createCompositeExtract(params.coords, - builder.getScalarTypeId(builder.getTypeId(params.coords)), - projSourceComp); + builder.getScalarTypeId(builder.getTypeId(params.coords)), projSourceComp); params.coords = builder.createCompositeInsert(projComp, params.coords, - builder.getTypeId(params.coords), projTargetComp); + builder.getTypeId(params.coords), projTargetComp); } } @@ -5087,7 +5344,8 @@ spv::Id TGlslangToSpvTraverser::handleUserFunctionCall(const glslang::TIntermAgg ++lValueCount; } else if (writableParam(qualifiers[a])) { // need space to hold the copy - arg = builder.createVariable(spv::StorageClassFunction, builder.getContainedTypeId(function->getParamType(a)), "param"); + arg = 
builder.createVariable(function->getParamPrecision(a), spv::StorageClassFunction, + builder.getContainedTypeId(function->getParamType(a)), "param"); if (qualifiers[a] == glslang::EvqIn || qualifiers[a] == glslang::EvqInOut) { // need to copy the input into output space builder.setAccessChain(lValues[lValueCount]); @@ -5099,12 +5357,14 @@ spv::Id TGlslangToSpvTraverser::handleUserFunctionCall(const glslang::TIntermAgg ++lValueCount; } else { // process r-value, which involves a copy for a type mismatch - if (function->getParamType(a) != convertGlslangToSpvType(*argTypes[a])) { - spv::Id argCopy = builder.createVariable(spv::StorageClassFunction, function->getParamType(a), "arg"); + if (function->getParamType(a) != convertGlslangToSpvType(*argTypes[a]) || + TranslatePrecisionDecoration(*argTypes[a]) != function->getParamPrecision(a)) + { + spv::Id argCopy = builder.createVariable(function->getParamPrecision(a), spv::StorageClassFunction, function->getParamType(a), "arg"); builder.clearAccessChain(); builder.setAccessChainLValue(argCopy); multiTypeStore(*argTypes[a], rValues[rValueCount]); - arg = builder.createLoad(argCopy); + arg = builder.createLoad(argCopy, function->getParamPrecision(a)); } else arg = rValues[rValueCount]; ++rValueCount; @@ -5123,7 +5383,7 @@ spv::Id TGlslangToSpvTraverser::handleUserFunctionCall(const glslang::TIntermAgg ++lValueCount; else if (writableParam(qualifiers[a])) { if (qualifiers[a] == glslang::EvqOut || qualifiers[a] == glslang::EvqInOut) { - spv::Id copy = builder.createLoad(spvArgs[a]); + spv::Id copy = builder.createLoad(spvArgs[a], spv::NoPrecision); builder.setAccessChain(lValues[lValueCount]); multiTypeStore(*argTypes[a], copy); } @@ -5368,7 +5628,7 @@ spv::Id TGlslangToSpvTraverser::createBinaryOperation(glslang::TOperator op, OpD case glslang::EOpNotEqual: case glslang::EOpVectorNotEqual: if (isFloat) - binOp = spv::OpFOrdNotEqual; + binOp = spv::OpFUnordNotEqual; else if (isBool) binOp = spv::OpLogicalNotEqual; else @@ -5502,7 +5762,7 @@ spv::Id TGlslangToSpvTraverser::createBinaryMatrixOperation(spv::Op op, OpDecora } spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDecorations& decorations, spv::Id typeId, - spv::Id operand, glslang::TBasicType typeProxy, const spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags) + spv::Id operand, glslang::TBasicType typeProxy, const spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags) { spv::Op unaryOp = spv::OpNop; int extBuiltins = -1; @@ -5763,6 +6023,24 @@ spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDe case glslang::EOpFwidthCoarse: unaryOp = spv::OpFwidthCoarse; break; + case glslang::EOpRayQueryProceed: + unaryOp = spv::OpRayQueryProceedKHR; + break; + case glslang::EOpRayQueryGetRayTMin: + unaryOp = spv::OpRayQueryGetRayTMinKHR; + break; + case glslang::EOpRayQueryGetRayFlags: + unaryOp = spv::OpRayQueryGetRayFlagsKHR; + break; + case glslang::EOpRayQueryGetWorldRayOrigin: + unaryOp = spv::OpRayQueryGetWorldRayOriginKHR; + break; + case glslang::EOpRayQueryGetWorldRayDirection: + unaryOp = spv::OpRayQueryGetWorldRayDirectionKHR; + break; + case glslang::EOpRayQueryGetIntersectionCandidateAABBOpaque: + unaryOp = spv::OpRayQueryGetIntersectionCandidateAABBOpaqueKHR; + break; case glslang::EOpInterpolateAtCentroid: if (typeProxy == glslang::EbtFloat16) builder.addExtension(spv::E_SPV_AMD_gpu_shader_half_float); @@ -6048,7 +6326,7 @@ spv::Id TGlslangToSpvTraverser::createConversion(glslang::TOperator op, OpDecora case 
glslang::EOpConvFloatToBool: zero = builder.makeFloatConstant(0.0F); zero = makeSmearedConstant(zero, vectorSize); - return builder.createBinOp(spv::OpFOrdNotEqual, destType, operand, zero); + return builder.createBinOp(spv::OpFUnordNotEqual, destType, operand, zero); case glslang::EOpConvBoolToFloat: convOp = spv::OpSelect; zero = builder.makeFloatConstant(0.0F); @@ -6197,11 +6475,11 @@ spv::Id TGlslangToSpvTraverser::createConversion(glslang::TOperator op, OpDecora case glslang::EOpConvDoubleToBool: zero = builder.makeDoubleConstant(0.0); zero = makeSmearedConstant(zero, vectorSize); - return builder.createBinOp(spv::OpFOrdNotEqual, destType, operand, zero); + return builder.createBinOp(spv::OpFUnordNotEqual, destType, operand, zero); case glslang::EOpConvFloat16ToBool: zero = builder.makeFloat16Constant(0.0F); zero = makeSmearedConstant(zero, vectorSize); - return builder.createBinOp(spv::OpFOrdNotEqual, destType, operand, zero); + return builder.createBinOp(spv::OpFUnordNotEqual, destType, operand, zero); case glslang::EOpConvBoolToDouble: convOp = spv::OpSelect; zero = builder.makeDoubleConstant(0.0); @@ -6395,7 +6673,9 @@ spv::Id TGlslangToSpvTraverser::makeSmearedConstant(spv::Id constant, int vector } // For glslang ops that map to SPV atomic opCodes -spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv::Decoration /*precision*/, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy, const spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags) +spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv::Decoration /*precision*/, + spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy, + const spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags) { spv::Op opCode = spv::OpNop; @@ -6404,6 +6684,14 @@ spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv case glslang::EOpImageAtomicAdd: case glslang::EOpAtomicCounterAdd: opCode = spv::OpAtomicIAdd; + if (typeProxy == glslang::EbtFloat || typeProxy == glslang::EbtDouble) { + opCode = spv::OpAtomicFAddEXT; + builder.addExtension(spv::E_SPV_EXT_shader_atomic_float_add); + if (typeProxy == glslang::EbtFloat) + builder.addCapability(spv::CapabilityAtomicFloat32AddEXT); + else + builder.addCapability(spv::CapabilityAtomicFloat64AddEXT); + } break; case glslang::EOpAtomicCounterSubtract: opCode = spv::OpAtomicISub; @@ -6411,12 +6699,14 @@ spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv case glslang::EOpAtomicMin: case glslang::EOpImageAtomicMin: case glslang::EOpAtomicCounterMin: - opCode = (typeProxy == glslang::EbtUint || typeProxy == glslang::EbtUint64) ? spv::OpAtomicUMin : spv::OpAtomicSMin; + opCode = (typeProxy == glslang::EbtUint || typeProxy == glslang::EbtUint64) ? + spv::OpAtomicUMin : spv::OpAtomicSMin; break; case glslang::EOpAtomicMax: case glslang::EOpImageAtomicMax: case glslang::EOpAtomicCounterMax: - opCode = (typeProxy == glslang::EbtUint || typeProxy == glslang::EbtUint64) ? spv::OpAtomicUMax : spv::OpAtomicSMax; + opCode = (typeProxy == glslang::EbtUint || typeProxy == glslang::EbtUint64) ? 
+ spv::OpAtomicUMax : spv::OpAtomicSMax; break; case glslang::EOpAtomicAnd: case glslang::EOpImageAtomicAnd: @@ -6481,7 +6771,8 @@ spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv scopeId = builder.makeUintConstant(spv::ScopeDevice); } // semantics default to relaxed - spv::Id semanticsId = builder.makeUintConstant(lvalueCoherentFlags.isVolatile() && glslangIntermediate->usingVulkanMemoryModel() ? + spv::Id semanticsId = builder.makeUintConstant(lvalueCoherentFlags.isVolatile() && + glslangIntermediate->usingVulkanMemoryModel() ? spv::MemorySemanticsVolatileMask : spv::MemorySemanticsMaskNone); spv::Id semanticsId2 = semanticsId; @@ -6494,20 +6785,24 @@ spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv valueId = operands[2]; if (operands.size() > 3) { scopeId = operands[3]; - semanticsId = builder.makeUintConstant(builder.getConstantScalar(operands[4]) | builder.getConstantScalar(operands[5])); - semanticsId2 = builder.makeUintConstant(builder.getConstantScalar(operands[6]) | builder.getConstantScalar(operands[7])); + semanticsId = builder.makeUintConstant( + builder.getConstantScalar(operands[4]) | builder.getConstantScalar(operands[5])); + semanticsId2 = builder.makeUintConstant( + builder.getConstantScalar(operands[6]) | builder.getConstantScalar(operands[7])); } } else if (opCode == spv::OpAtomicLoad) { if (operands.size() > 1) { scopeId = operands[1]; - semanticsId = builder.makeUintConstant(builder.getConstantScalar(operands[2]) | builder.getConstantScalar(operands[3])); + semanticsId = builder.makeUintConstant( + builder.getConstantScalar(operands[2]) | builder.getConstantScalar(operands[3])); } } else { // atomic store or RMW valueId = operands[1]; if (operands.size() > 2) { scopeId = operands[2]; - semanticsId = builder.makeUintConstant(builder.getConstantScalar(operands[3]) | builder.getConstantScalar(operands[4])); + semanticsId = builder.makeUintConstant + (builder.getConstantScalar(operands[3]) | builder.getConstantScalar(operands[4])); } } @@ -6552,7 +6847,8 @@ spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv } // Create group invocation operations. -spv::Id TGlslangToSpvTraverser::createInvocationsOperation(glslang::TOperator op, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy) +spv::Id TGlslangToSpvTraverser::createInvocationsOperation(glslang::TOperator op, spv::Id typeId, + std::vector& operands, glslang::TBasicType typeProxy) { bool isUnsigned = isTypeUnsignedInt(typeProxy); bool isFloat = isTypeFloat(typeProxy); @@ -6766,8 +7062,10 @@ spv::Id TGlslangToSpvTraverser::CreateInvocationsVectorOperation(spv::Op op, spv op == spv::OpGroupFMax || op == spv::OpGroupUMax || op == spv::OpGroupSMax || op == spv::OpGroupFAdd || op == spv::OpGroupIAdd || op == spv::OpGroupBroadcast || op == spv::OpSubgroupReadInvocationKHR || - op == spv::OpGroupFMinNonUniformAMD || op == spv::OpGroupUMinNonUniformAMD || op == spv::OpGroupSMinNonUniformAMD || - op == spv::OpGroupFMaxNonUniformAMD || op == spv::OpGroupUMaxNonUniformAMD || op == spv::OpGroupSMaxNonUniformAMD || + op == spv::OpGroupFMinNonUniformAMD || op == spv::OpGroupUMinNonUniformAMD || + op == spv::OpGroupSMinNonUniformAMD || + op == spv::OpGroupFMaxNonUniformAMD || op == spv::OpGroupUMaxNonUniformAMD || + op == spv::OpGroupSMaxNonUniformAMD || op == spv::OpGroupFAddNonUniformAMD || op == spv::OpGroupIAddNonUniformAMD); // Handle group invocation operations scalar by scalar. 
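The last context line above belongs to CreateInvocationsVectorOperation, which, as its comment says, handles group invocation operations scalar by scalar: each component of a vector operand is extracted, the scalar group operation is applied to it, and the per-component results are reassembled into a vector. A rough standalone sketch of that per-component pattern follows; the function and parameter names are illustrative assumptions, not code from the patch.

    #include <array>
    #include <cassert>
    #include <cstddef>

    // Apply a scalar operation to every component of a small fixed-size
    // "vector" and rebuild the vector from the per-component results,
    // mirroring the scalar-by-scalar lowering described above.
    template <typename T, std::size_t N>
    std::array<T, N> applyPerComponent(const std::array<T, N>& v, T (*scalarOp)(T)) {
        std::array<T, N> result{};
        for (std::size_t i = 0; i < N; ++i)
            result[i] = scalarOp(v[i]);
        return result;
    }

    // Example use: negate each component of a 4-component vector.
    static float negateScalar(float x) { return -x; }

    int main() {
        const std::array<float, 4> negated =
            applyPerComponent<float, 4>({1.0f, 2.0f, 3.0f, 4.0f}, &negateScalar);
        assert(negated[2] == -3.0f);
        return 0;
    }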
@@ -7156,7 +7454,8 @@ spv::Id TGlslangToSpvTraverser::createSubgroupOperation(glslang::TOperator op, s return builder.createOp(opCode, typeId, spvGroupOperands); } -spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy) +spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::Decoration precision, + spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy) { bool isUnsigned = isTypeUnsignedInt(typeProxy); bool isFloat = isTypeFloat(typeProxy); @@ -7258,14 +7557,16 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv:: unsigned int executionScope = builder.getConstantScalar(operands[0]); unsigned int memoryScope = builder.getConstantScalar(operands[1]); unsigned int semantics = builder.getConstantScalar(operands[2]) | builder.getConstantScalar(operands[3]); - builder.createControlBarrier((spv::Scope)executionScope, (spv::Scope)memoryScope, (spv::MemorySemanticsMask)semantics); + builder.createControlBarrier((spv::Scope)executionScope, (spv::Scope)memoryScope, + (spv::MemorySemanticsMask)semantics); if (semantics & (spv::MemorySemanticsMakeAvailableKHRMask | spv::MemorySemanticsMakeVisibleKHRMask | spv::MemorySemanticsOutputMemoryKHRMask | spv::MemorySemanticsVolatileMask)) { builder.addCapability(spv::CapabilityVulkanMemoryModelKHR); } - if (glslangIntermediate->usingVulkanMemoryModel() && (executionScope == spv::ScopeDevice || memoryScope == spv::ScopeDevice)) { + if (glslangIntermediate->usingVulkanMemoryModel() && (executionScope == spv::ScopeDevice || + memoryScope == spv::ScopeDevice)) { builder.addCapability(spv::CapabilityVulkanMemoryModelDeviceScopeKHR); } return 0; @@ -7348,7 +7649,8 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv:: if (builder.getNumComponents(operands[0]) == 1) frexpIntType = builder.makeIntegerType(width, true); else - frexpIntType = builder.makeVectorType(builder.makeIntegerType(width, true), builder.getNumComponents(operands[0])); + frexpIntType = builder.makeVectorType(builder.makeIntegerType(width, true), + builder.getNumComponents(operands[0])); typeId = builder.makeStructResultType(typeId0, frexpIntType); consumedOperands = 1; } @@ -7451,24 +7753,105 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv:: libCall = spv::InterpolateAtVertexAMD; break; - case glslang::EOpReportIntersectionNV: - { + case glslang::EOpReportIntersection: typeId = builder.makeBoolType(); - opCode = spv::OpReportIntersectionNV; - } - break; - case glslang::EOpTraceNV: - { - builder.createNoResultOp(spv::OpTraceNV, operands); + opCode = spv::OpReportIntersectionKHR; + break; + case glslang::EOpTrace: + builder.createNoResultOp(spv::OpTraceRayKHR, operands); return 0; - } - break; - case glslang::EOpExecuteCallableNV: - { - builder.createNoResultOp(spv::OpExecuteCallableNV, operands); + case glslang::EOpExecuteCallable: + builder.createNoResultOp(spv::OpExecuteCallableKHR, operands); return 0; - } - break; + + case glslang::EOpRayQueryInitialize: + builder.createNoResultOp(spv::OpRayQueryInitializeKHR, operands); + return 0; + case glslang::EOpRayQueryTerminate: + builder.createNoResultOp(spv::OpRayQueryTerminateKHR, operands); + return 0; + case glslang::EOpRayQueryGenerateIntersection: + builder.createNoResultOp(spv::OpRayQueryGenerateIntersectionKHR, operands); + return 0; + case glslang::EOpRayQueryConfirmIntersection: + 
builder.createNoResultOp(spv::OpRayQueryConfirmIntersectionKHR, operands); + return 0; + case glslang::EOpRayQueryProceed: + typeId = builder.makeBoolType(); + opCode = spv::OpRayQueryProceedKHR; + break; + case glslang::EOpRayQueryGetIntersectionType: + typeId = builder.makeUintType(32); + opCode = spv::OpRayQueryGetIntersectionTypeKHR; + break; + case glslang::EOpRayQueryGetRayTMin: + typeId = builder.makeFloatType(32); + opCode = spv::OpRayQueryGetRayTMinKHR; + break; + case glslang::EOpRayQueryGetRayFlags: + typeId = builder.makeIntType(32); + opCode = spv::OpRayQueryGetRayFlagsKHR; + break; + case glslang::EOpRayQueryGetIntersectionT: + typeId = builder.makeFloatType(32); + opCode = spv::OpRayQueryGetIntersectionTKHR; + break; + case glslang::EOpRayQueryGetIntersectionInstanceCustomIndex: + typeId = builder.makeIntType(32); + opCode = spv::OpRayQueryGetIntersectionInstanceCustomIndexKHR; + break; + case glslang::EOpRayQueryGetIntersectionInstanceId: + typeId = builder.makeIntType(32); + opCode = spv::OpRayQueryGetIntersectionInstanceIdKHR; + break; + case glslang::EOpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffset: + typeId = builder.makeIntType(32); + opCode = spv::OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR; + break; + case glslang::EOpRayQueryGetIntersectionGeometryIndex: + typeId = builder.makeIntType(32); + opCode = spv::OpRayQueryGetIntersectionGeometryIndexKHR; + break; + case glslang::EOpRayQueryGetIntersectionPrimitiveIndex: + typeId = builder.makeIntType(32); + opCode = spv::OpRayQueryGetIntersectionPrimitiveIndexKHR; + break; + case glslang::EOpRayQueryGetIntersectionBarycentrics: + typeId = builder.makeVectorType(builder.makeFloatType(32), 2); + opCode = spv::OpRayQueryGetIntersectionBarycentricsKHR; + break; + case glslang::EOpRayQueryGetIntersectionFrontFace: + typeId = builder.makeBoolType(); + opCode = spv::OpRayQueryGetIntersectionFrontFaceKHR; + break; + case glslang::EOpRayQueryGetIntersectionCandidateAABBOpaque: + typeId = builder.makeBoolType(); + opCode = spv::OpRayQueryGetIntersectionCandidateAABBOpaqueKHR; + break; + case glslang::EOpRayQueryGetIntersectionObjectRayDirection: + typeId = builder.makeVectorType(builder.makeFloatType(32), 3); + opCode = spv::OpRayQueryGetIntersectionObjectRayDirectionKHR; + break; + case glslang::EOpRayQueryGetIntersectionObjectRayOrigin: + typeId = builder.makeVectorType(builder.makeFloatType(32), 3); + opCode = spv::OpRayQueryGetIntersectionObjectRayOriginKHR; + break; + case glslang::EOpRayQueryGetWorldRayDirection: + typeId = builder.makeVectorType(builder.makeFloatType(32), 3); + opCode = spv::OpRayQueryGetWorldRayDirectionKHR; + break; + case glslang::EOpRayQueryGetWorldRayOrigin: + typeId = builder.makeVectorType(builder.makeFloatType(32), 3); + opCode = spv::OpRayQueryGetWorldRayOriginKHR; + break; + case glslang::EOpRayQueryGetIntersectionObjectToWorld: + typeId = builder.makeMatrixType(builder.makeFloatType(32), 4, 3); + opCode = spv::OpRayQueryGetIntersectionObjectToWorldKHR; + break; + case glslang::EOpRayQueryGetIntersectionWorldToObject: + typeId = builder.makeMatrixType(builder.makeFloatType(32), 4, 3); + opCode = spv::OpRayQueryGetIntersectionWorldToObjectKHR; + break; case glslang::EOpWritePackedPrimitiveIndices4x8NV: builder.createNoResultOp(spv::OpWritePackedPrimitiveIndices4x8NV, operands); return 0; @@ -7558,7 +7941,8 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv:: spv::Id TGlslangToSpvTraverser::createNoArgOperation(glslang::TOperator 
op, spv::Decoration precision, spv::Id typeId) { // GLSL memory barriers use queuefamily scope in new model, device scope in old model - spv::Scope memoryBarrierScope = glslangIntermediate->usingVulkanMemoryModel() ? spv::ScopeQueueFamilyKHR : spv::ScopeDevice; + spv::Scope memoryBarrierScope = glslangIntermediate->usingVulkanMemoryModel() ? + spv::ScopeQueueFamilyKHR : spv::ScopeDevice; switch (op) { case glslang::EOpBarrier: @@ -7664,13 +8048,24 @@ spv::Id TGlslangToSpvTraverser::createNoArgOperation(glslang::TOperator op, spv: spv::Id id = builder.createBuiltinCall(typeId, getExtBuiltins(spv::E_SPV_AMD_gcn_shader), spv::TimeAMD, args); return builder.setPrecision(id, precision); } - case glslang::EOpIgnoreIntersectionNV: - builder.createNoResultOp(spv::OpIgnoreIntersectionNV); + case glslang::EOpIgnoreIntersection: + builder.createNoResultOp(spv::OpIgnoreIntersectionKHR); return 0; - case glslang::EOpTerminateRayNV: - builder.createNoResultOp(spv::OpTerminateRayNV); + case glslang::EOpTerminateRay: + builder.createNoResultOp(spv::OpTerminateRayKHR); + return 0; + case glslang::EOpRayQueryInitialize: + builder.createNoResultOp(spv::OpRayQueryInitializeKHR); + return 0; + case glslang::EOpRayQueryTerminate: + builder.createNoResultOp(spv::OpRayQueryTerminateKHR); + return 0; + case glslang::EOpRayQueryGenerateIntersection: + builder.createNoResultOp(spv::OpRayQueryGenerateIntersectionKHR); + return 0; + case glslang::EOpRayQueryConfirmIntersection: + builder.createNoResultOp(spv::OpRayQueryConfirmIntersectionKHR); return 0; - case glslang::EOpBeginInvocationInterlock: builder.createNoResultOp(spv::OpBeginInvocationInterlockEXT); return 0; @@ -7722,7 +8117,7 @@ spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol // it was not found, create it spv::BuiltIn builtIn = TranslateBuiltInDecoration(symbol->getQualifier().builtIn, false); - auto forcedType = getForcedType(builtIn, symbol->getType()); + auto forcedType = getForcedType(symbol->getQualifier().builtIn, symbol->getType()); id = createSpvVariable(symbol, forcedType.first); symbolValues[symbol->getId()] = id; if (forcedType.second != spv::NoType) @@ -7787,7 +8182,8 @@ spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol #ifndef GLSLANG_WEB if (symbol->getType().isImage()) { std::vector memory; - TranslateMemoryDecoration(symbol->getType().getQualifier(), memory, glslangIntermediate->usingVulkanMemoryModel()); + TranslateMemoryDecoration(symbol->getType().getQualifier(), memory, + glslangIntermediate->usingVulkanMemoryModel()); for (unsigned int i = 0; i < memory.size(); ++i) builder.addDecoration(id, memory[i]); } @@ -7841,7 +8237,8 @@ spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol } if (symbol->isReference()) { - builder.addDecoration(id, symbol->getType().getQualifier().restrict ? spv::DecorationRestrictPointerEXT : spv::DecorationAliasedPointerEXT); + builder.addDecoration(id, symbol->getType().getQualifier().restrict ? + spv::DecorationRestrictPointerEXT : spv::DecorationAliasedPointerEXT); } #endif @@ -7903,12 +8300,25 @@ spv::Id TGlslangToSpvTraverser::createSpvConstant(const glslang::TIntermTyped& n // hand off to the non-spec-constant path assert(node.getAsConstantUnion() != nullptr || node.getAsSymbolNode() != nullptr); int nextConst = 0; - return createSpvConstantFromConstUnionArray(node.getType(), node.getAsConstantUnion() ? 
node.getAsConstantUnion()->getConstArray() : node.getAsSymbolNode()->getConstArray(), - nextConst, false); + return createSpvConstantFromConstUnionArray(node.getType(), node.getAsConstantUnion() ? + node.getAsConstantUnion()->getConstArray() : node.getAsSymbolNode()->getConstArray(), + nextConst, false); } // We now know we have a specialization constant to build + // Extra capabilities may be needed. + if (node.getType().contains8BitInt()) + builder.addCapability(spv::CapabilityInt8); + if (node.getType().contains16BitFloat()) + builder.addCapability(spv::CapabilityFloat16); + if (node.getType().contains16BitInt()) + builder.addCapability(spv::CapabilityInt16); + if (node.getType().contains64BitInt()) + builder.addCapability(spv::CapabilityInt64); + if (node.getType().containsDouble()) + builder.addCapability(spv::CapabilityFloat64); + // gl_WorkGroupSize is a special case until the front-end handles hierarchical specialization constants, // even then, it's specialization ids are handled by special case syntax in GLSL: layout(local_size_x = ... if (node.getType().getQualifier().builtIn == glslang::EbvWorkGroupSize) { @@ -7957,7 +8367,8 @@ spv::Id TGlslangToSpvTraverser::createSpvConstant(const glslang::TIntermTyped& n // If there are not enough elements present in 'consts', 0 will be substituted; // an empty 'consts' can be used to create a fully zeroed SPIR-V constant. // -spv::Id TGlslangToSpvTraverser::createSpvConstantFromConstUnionArray(const glslang::TType& glslangType, const glslang::TConstUnionArray& consts, int& nextConst, bool specConstant) +spv::Id TGlslangToSpvTraverser::createSpvConstantFromConstUnionArray(const glslang::TType& glslangType, + const glslang::TConstUnionArray& consts, int& nextConst, bool specConstant) { // vector of constants for SPIR-V std::vector spvConsts; @@ -8075,6 +8486,9 @@ spv::Id TGlslangToSpvTraverser::createSpvConstantFromConstUnionArray(const glsla scalar = builder.createUnaryOp(spv::OpBitcast, typeId, scalar); break; #endif + case glslang::EbtString: + scalar = builder.getStringId(consts[nextConst].getSConst()->c_str()); + break; default: assert(0); break; @@ -8175,7 +8589,8 @@ bool TGlslangToSpvTraverser::isTrivial(const glslang::TIntermTyped* node) // Emit short-circuiting code, where 'right' is never evaluated unless // the left side is true (for &&) or false (for ||). 
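The contract the comment above describes can be stated in plain C++; this behavioural sketch (names are illustrative, not from the patch) is what createShortCircuit() below has to preserve when lowering && and || to SPIR-V control flow.

#include <functional>

// Not from the patch: behavioural model of GLSL short-circuiting. The 'right'
// callback stands in for the deferred evaluation of the right-hand operand.
bool shortCircuitAnd(bool left, const std::function<bool()>& right)
{
    return left ? right() : false;   // right side only runs when left is true
}

bool shortCircuitOr(bool left, const std::function<bool()>& right)
{
    return left ? true : right();    // right side only runs when left is false
}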
-spv::Id TGlslangToSpvTraverser::createShortCircuit(glslang::TOperator op, glslang::TIntermTyped& left, glslang::TIntermTyped& right) +spv::Id TGlslangToSpvTraverser::createShortCircuit(glslang::TOperator op, glslang::TIntermTyped& left, + glslang::TIntermTyped& right) { spv::Id boolTypeId = builder.makeBoolType(); @@ -8259,7 +8674,9 @@ int GetSpirvGeneratorVersion() // return 6; // revert version 5 change, which makes a different (new) kind of incorrect code, // versions 4 and 6 each generate OpArrayLength as it has long been done // return 7; // GLSL volatile keyword maps to both SPIR-V decorations Volatile and Coherent - return 8; // switch to new dead block eliminator; use OpUnreachable + // return 8; // switch to new dead block eliminator; use OpUnreachable + // return 9; // don't include opaque function parameters in OpEntryPoint global's operand list + return 10; // Generate OpFUnordNotEqual for != comparisons } // Write SPIR-V out to a binary file @@ -8279,14 +8696,15 @@ void OutputSpvBin(const std::vector& spirv, const char* baseName) // Write SPIR-V out to a text file with 32-bit hexadecimal words void OutputSpvHex(const std::vector& spirv, const char* baseName, const char* varName) { -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) std::ofstream out; out.open(baseName, std::ios::binary | std::ios::out); if (out.fail()) printf("ERROR: Failed to open file: %s\n", baseName); - out << "\t// " << - GetSpirvGeneratorVersion() << "." << GLSLANG_MINOR_VERSION << "." << GLSLANG_PATCH_LEVEL << - std::endl; + out << "\t// " << + GetSpirvGeneratorVersion() << + GLSLANG_VERSION_MAJOR << "." << GLSLANG_VERSION_MINOR << "." << GLSLANG_VERSION_PATCH << + GLSLANG_VERSION_FLAVOR << std::endl; if (varName != nullptr) { out << "\t #pragma once" << std::endl; out << "const uint32_t " << varName << "[] = {" << std::endl; @@ -8305,6 +8723,7 @@ void OutputSpvHex(const std::vector& spirv, const char* baseName, } if (varName != nullptr) { out << "};"; + out << std::endl; } out.close(); #endif @@ -8342,10 +8761,14 @@ void GlslangToSpv(const TIntermediate& intermediate, std::vector& // If from HLSL, run spirv-opt to "legalize" the SPIR-V for Vulkan // eg. forward and remove memory writes of opaque types. bool prelegalization = intermediate.getSource() == EShSourceHlsl; - if ((intermediate.getSource() == EShSourceHlsl || options->optimizeSize) && !options->disableOptimizer) { - SpirvToolsLegalize(intermediate, spirv, logger, options); + if ((prelegalization || options->optimizeSize) && !options->disableOptimizer) { + SpirvToolsTransform(intermediate, spirv, logger, options); prelegalization = false; } + else if (options->stripDebugInfo) { + // Strip debug info even if optimization is disabled. 
+ SpirvToolsStripDebugInfo(intermediate, spirv, logger); + } if (options->validate) SpirvToolsValidate(intermediate, spirv, logger, prelegalization); diff --git a/thirdparty/glslang/SPIRV/GlslangToSpv.h b/thirdparty/glslang/SPIRV/GlslangToSpv.h old mode 100755 new mode 100644 diff --git a/thirdparty/glslang/SPIRV/Logger.cpp b/thirdparty/glslang/SPIRV/Logger.cpp index 7ea0c6342b7..cdc8469c447 100644 --- a/thirdparty/glslang/SPIRV/Logger.cpp +++ b/thirdparty/glslang/SPIRV/Logger.cpp @@ -69,4 +69,4 @@ std::string SpvBuildLogger::getAllMessages() const { } // end spv namespace -#endif \ No newline at end of file +#endif diff --git a/thirdparty/glslang/SPIRV/NonSemanticDebugPrintf.h b/thirdparty/glslang/SPIRV/NonSemanticDebugPrintf.h new file mode 100644 index 00000000000..83796d75e56 --- /dev/null +++ b/thirdparty/glslang/SPIRV/NonSemanticDebugPrintf.h @@ -0,0 +1,50 @@ +// Copyright (c) 2020 The Khronos Group Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and/or associated documentation files (the +// "Materials"), to deal in the Materials without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Materials, and to +// permit persons to whom the Materials are furnished to do so, subject to +// the following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS +// KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS +// SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT +// https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +// MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +// + +#ifndef SPIRV_UNIFIED1_NonSemanticDebugPrintf_H_ +#define SPIRV_UNIFIED1_NonSemanticDebugPrintf_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +enum { + NonSemanticDebugPrintfRevision = 1, + NonSemanticDebugPrintfRevision_BitWidthPadding = 0x7fffffff +}; + +enum NonSemanticDebugPrintfInstructions { + NonSemanticDebugPrintfDebugPrintf = 1, + NonSemanticDebugPrintfInstructionsMax = 0x7fffffff +}; + + +#ifdef __cplusplus +} +#endif + +#endif // SPIRV_UNIFIED1_NonSemanticDebugPrintf_H_ diff --git a/thirdparty/glslang/SPIRV/SPVRemapper.cpp b/thirdparty/glslang/SPIRV/SPVRemapper.cpp index fd0bb8950c6..1adc6a3042b 100644 --- a/thirdparty/glslang/SPIRV/SPVRemapper.cpp +++ b/thirdparty/glslang/SPIRV/SPVRemapper.cpp @@ -625,6 +625,9 @@ namespace spv { break; } + case spv::OperandVariableLiteralStrings: + return nextInst; + // Execution mode might have extra literal operands. Skip them. case spv::OperandExecutionMode: return nextInst; diff --git a/thirdparty/glslang/SPIRV/SpvBuilder.cpp b/thirdparty/glslang/SPIRV/SpvBuilder.cpp index bd208952e02..96803314699 100644 --- a/thirdparty/glslang/SPIRV/SpvBuilder.cpp +++ b/thirdparty/glslang/SPIRV/SpvBuilder.cpp @@ -1,6 +1,7 @@ // // Copyright (C) 2014-2015 LunarG, Inc. // Copyright (C) 2015-2018 Google, Inc. 
+// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. // @@ -496,7 +497,8 @@ Id Builder::makeFunctionType(Id returnType, const std::vector& paramTypes) return type->getResultId(); } -Id Builder::makeImageType(Id sampledType, Dim dim, bool depth, bool arrayed, bool ms, unsigned sampled, ImageFormat format) +Id Builder::makeImageType(Id sampledType, Dim dim, bool depth, bool arrayed, bool ms, unsigned sampled, + ImageFormat format) { assert(sampled == 1 || sampled == 2); @@ -601,16 +603,31 @@ Id Builder::makeSampledImageType(Id imageType) } #ifndef GLSLANG_WEB -Id Builder::makeAccelerationStructureNVType() +Id Builder::makeAccelerationStructureType() { Instruction *type; - if (groupedTypes[OpTypeAccelerationStructureNV].size() == 0) { - type = new Instruction(getUniqueId(), NoType, OpTypeAccelerationStructureNV); - groupedTypes[OpTypeAccelerationStructureNV].push_back(type); + if (groupedTypes[OpTypeAccelerationStructureKHR].size() == 0) { + type = new Instruction(getUniqueId(), NoType, OpTypeAccelerationStructureKHR); + groupedTypes[OpTypeAccelerationStructureKHR].push_back(type); constantsTypesGlobals.push_back(std::unique_ptr(type)); module.mapInstruction(type); } else { - type = groupedTypes[OpTypeAccelerationStructureNV].back(); + type = groupedTypes[OpTypeAccelerationStructureKHR].back(); + } + + return type->getResultId(); +} + +Id Builder::makeRayQueryType() +{ + Instruction *type; + if (groupedTypes[OpTypeRayQueryProvisionalKHR].size() == 0) { + type = new Instruction(getUniqueId(), NoType, OpTypeRayQueryProvisionalKHR); + groupedTypes[OpTypeRayQueryProvisionalKHR].push_back(type); + constantsTypesGlobals.push_back(std::unique_ptr(type)); + module.mapInstruction(type); + } else { + type = groupedTypes[OpTypeRayQueryProvisionalKHR].back(); } return type->getResultId(); @@ -1166,6 +1183,28 @@ void Builder::addExecutionMode(Function* entryPoint, ExecutionMode mode, int val executionModes.push_back(std::unique_ptr(instr)); } +void Builder::addExecutionMode(Function* entryPoint, ExecutionMode mode, const std::vector& literals) +{ + Instruction* instr = new Instruction(OpExecutionMode); + instr->addIdOperand(entryPoint->getId()); + instr->addImmediateOperand(mode); + for (auto literal : literals) + instr->addImmediateOperand(literal); + + executionModes.push_back(std::unique_ptr(instr)); +} + +void Builder::addExecutionModeId(Function* entryPoint, ExecutionMode mode, const std::vector& operandIds) +{ + Instruction* instr = new Instruction(OpExecutionModeId); + instr->addIdOperand(entryPoint->getId()); + instr->addImmediateOperand(mode); + for (auto operandId : operandIds) + instr->addIdOperand(operandId); + + executionModes.push_back(std::unique_ptr(instr)); +} + void Builder::addName(Id id, const char* string) { Instruction* name = new Instruction(OpName); @@ -1204,7 +1243,7 @@ void Builder::addDecoration(Id id, Decoration decoration, const char* s) if (decoration == spv::DecorationMax) return; - Instruction* dec = new Instruction(OpDecorateStringGOOGLE); + Instruction* dec = new Instruction(OpDecorateString); dec->addIdOperand(id); dec->addImmediateOperand(decoration); dec->addStringOperand(s); @@ -1212,6 +1251,34 @@ void Builder::addDecoration(Id id, Decoration decoration, const char* s) decorations.push_back(std::unique_ptr(dec)); } +void Builder::addDecoration(Id id, Decoration decoration, const std::vector& literals) +{ + if (decoration == spv::DecorationMax) + return; + + Instruction* dec = new Instruction(OpDecorate); + 
dec->addIdOperand(id); + dec->addImmediateOperand(decoration); + for (auto literal : literals) + dec->addImmediateOperand(literal); + + decorations.push_back(std::unique_ptr(dec)); +} + +void Builder::addDecoration(Id id, Decoration decoration, const std::vector& strings) +{ + if (decoration == spv::DecorationMax) + return; + + Instruction* dec = new Instruction(OpDecorateString); + dec->addIdOperand(id); + dec->addImmediateOperand(decoration); + for (auto string : strings) + dec->addStringOperand(string); + + decorations.push_back(std::unique_ptr(dec)); +} + void Builder::addDecorationId(Id id, Decoration decoration, Id idDecoration) { if (decoration == spv::DecorationMax) @@ -1225,6 +1292,21 @@ void Builder::addDecorationId(Id id, Decoration decoration, Id idDecoration) decorations.push_back(std::unique_ptr(dec)); } +void Builder::addDecorationId(Id id, Decoration decoration, const std::vector& operandIds) +{ + if(decoration == spv::DecorationMax) + return; + + Instruction* dec = new Instruction(OpDecorateId); + dec->addIdOperand(id); + dec->addImmediateOperand(decoration); + + for (auto operandId : operandIds) + dec->addIdOperand(operandId); + + decorations.push_back(std::unique_ptr(dec)); +} + void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, int num) { if (decoration == spv::DecorationMax) @@ -1254,6 +1336,36 @@ void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decorat decorations.push_back(std::unique_ptr(dec)); } +void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, const std::vector& literals) +{ + if (decoration == spv::DecorationMax) + return; + + Instruction* dec = new Instruction(OpMemberDecorate); + dec->addIdOperand(id); + dec->addImmediateOperand(member); + dec->addImmediateOperand(decoration); + for (auto literal : literals) + dec->addImmediateOperand(literal); + + decorations.push_back(std::unique_ptr(dec)); +} + +void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, const std::vector& strings) +{ + if (decoration == spv::DecorationMax) + return; + + Instruction* dec = new Instruction(OpMemberDecorateString); + dec->addIdOperand(id); + dec->addImmediateOperand(member); + dec->addImmediateOperand(decoration); + for (auto string : strings) + dec->addStringOperand(string); + + decorations.push_back(std::unique_ptr(dec)); +} + // Comments in header Function* Builder::makeEntryPoint(const char* entryPoint) { @@ -1270,7 +1382,8 @@ Function* Builder::makeEntryPoint(const char* entryPoint) // Comments in header Function* Builder::makeFunctionEntry(Decoration precision, Id returnType, const char* name, - const std::vector& paramTypes, const std::vector>& decorations, Block **entry) + const std::vector& paramTypes, + const std::vector>& decorations, Block **entry) { // Make the function and initial instructions in it Id typeId = makeFunctionType(returnType, paramTypes); @@ -1279,9 +1392,12 @@ Function* Builder::makeFunctionEntry(Decoration precision, Id returnType, const // Set up the precisions setPrecision(function->getId(), precision); + function->setReturnPrecision(precision); for (unsigned p = 0; p < (unsigned)decorations.size(); ++p) { - for (int d = 0; d < (int)decorations[p].size(); ++d) + for (int d = 0; d < (int)decorations[p].size(); ++d) { addDecoration(firstParamId + p, decorations[p][d]); + function->addParamPrecision(p, decorations[p][d]); + } } // CFG @@ -1338,7 +1454,7 @@ void Builder::makeDiscard() } // Comments in header -Id 
Builder::createVariable(StorageClass storageClass, Id type, const char* name, Id initializer) +Id Builder::createVariable(Decoration precision, StorageClass storageClass, Id type, const char* name, Id initializer) { Id pointerType = makePointer(storageClass, type); Instruction* inst = new Instruction(getUniqueId(), pointerType, OpVariable); @@ -1360,6 +1476,7 @@ Id Builder::createVariable(StorageClass storageClass, Id type, const char* name, if (name) addName(inst->getResultId(), name); + setPrecision(inst->getResultId(), precision); return inst->getResultId(); } @@ -1373,7 +1490,8 @@ Id Builder::createUndefined(Id type) } // av/vis/nonprivate are unnecessary and illegal for some storage classes. -spv::MemoryAccessMask Builder::sanitizeMemoryAccessForStorageClass(spv::MemoryAccessMask memoryAccess, StorageClass sc) const +spv::MemoryAccessMask Builder::sanitizeMemoryAccessForStorageClass(spv::MemoryAccessMask memoryAccess, StorageClass sc) + const { switch (sc) { case spv::StorageClassUniform: @@ -1392,7 +1510,8 @@ spv::MemoryAccessMask Builder::sanitizeMemoryAccessForStorageClass(spv::MemoryAc } // Comments in header -void Builder::createStore(Id rValue, Id lValue, spv::MemoryAccessMask memoryAccess, spv::Scope scope, unsigned int alignment) +void Builder::createStore(Id rValue, Id lValue, spv::MemoryAccessMask memoryAccess, spv::Scope scope, + unsigned int alignment) { Instruction* store = new Instruction(OpStore); store->addIdOperand(lValue); @@ -1414,7 +1533,8 @@ void Builder::createStore(Id rValue, Id lValue, spv::MemoryAccessMask memoryAcce } // Comments in header -Id Builder::createLoad(Id lValue, spv::MemoryAccessMask memoryAccess, spv::Scope scope, unsigned int alignment) +Id Builder::createLoad(Id lValue, spv::Decoration precision, spv::MemoryAccessMask memoryAccess, + spv::Scope scope, unsigned int alignment) { Instruction* load = new Instruction(getUniqueId(), getDerefTypeId(lValue), OpLoad); load->addIdOperand(lValue); @@ -1432,6 +1552,7 @@ Id Builder::createLoad(Id lValue, spv::MemoryAccessMask memoryAccess, spv::Scope } buildPoint->addInstruction(std::unique_ptr(load)); + setPrecision(load->getResultId(), precision); return load->getResultId(); } @@ -1495,7 +1616,8 @@ Id Builder::createCompositeExtract(Id composite, Id typeId, unsigned index) // Generate code for spec constants if in spec constant operation // generation mode. if (generatingOpCodeForSpecConst) { - return createSpecConstantOp(OpCompositeExtract, typeId, std::vector(1, composite), std::vector(1, index)); + return createSpecConstantOp(OpCompositeExtract, typeId, std::vector(1, composite), + std::vector(1, index)); } Instruction* extract = new Instruction(getUniqueId(), typeId, OpCompositeExtract); extract->addIdOperand(composite); @@ -1697,7 +1819,8 @@ Id Builder::createOp(Op opCode, Id typeId, const std::vector& opera return op->getResultId(); } -Id Builder::createSpecConstantOp(Op opCode, Id typeId, const std::vector& operands, const std::vector& literals) +Id Builder::createSpecConstantOp(Op opCode, Id typeId, const std::vector& operands, + const std::vector& literals) { Instruction* op = new Instruction(getUniqueId(), typeId, OpSpecConstantOp); op->addImmediateOperand((unsigned) opCode); @@ -2144,7 +2267,7 @@ Id Builder::createCompositeCompare(Decoration precision, Id value1, Id value2, b Op op; switch (getMostBasicTypeClass(valueType)) { case OpTypeFloat: - op = equal ? OpFOrdEqual : OpFOrdNotEqual; + op = equal ? 
OpFOrdEqual : OpFUnordNotEqual; break; case OpTypeInt: default: @@ -2187,7 +2310,8 @@ Id Builder::createCompositeCompare(Decoration precision, Id value1, Id value2, b if (constituent == 0) resultId = subResultId; else - resultId = setPrecision(createBinOp(equal ? OpLogicalAnd : OpLogicalOr, boolType, resultId, subResultId), precision); + resultId = setPrecision(createBinOp(equal ? OpLogicalAnd : OpLogicalOr, boolType, resultId, subResultId), + precision); } return resultId; @@ -2196,7 +2320,8 @@ Id Builder::createCompositeCompare(Decoration precision, Id value1, Id value2, b // OpCompositeConstruct Id Builder::createCompositeConstruct(Id typeId, const std::vector& constituents) { - assert(isAggregateType(typeId) || (getNumTypeConstituents(typeId) > 1 && getNumTypeConstituents(typeId) == (int)constituents.size())); + assert(isAggregateType(typeId) || (getNumTypeConstituents(typeId) > 1 && + getNumTypeConstituents(typeId) == (int)constituents.size())); if (generatingOpCodeForSpecConst) { // Sometime, even in spec-constant-op mode, the constant composite to be @@ -2609,7 +2734,8 @@ void Builder::clearAccessChain() } // Comments in header -void Builder::accessChainPushSwizzle(std::vector& swizzle, Id preSwizzleBaseType, AccessChain::CoherentFlags coherentFlags, unsigned int alignment) +void Builder::accessChainPushSwizzle(std::vector& swizzle, Id preSwizzleBaseType, + AccessChain::CoherentFlags coherentFlags, unsigned int alignment) { accessChain.coherentFlags |= coherentFlags; accessChain.alignment |= alignment; @@ -2649,7 +2775,7 @@ void Builder::accessChainStore(Id rvalue, spv::MemoryAccessMask memoryAccess, sp // If swizzle still exists, it is out-of-order or not full, we must load the target vector, // extract and insert elements to perform writeMask and/or swizzle. 
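To make the comment above concrete: a sparse or out-of-order swizzle store such as v.zx = e cannot be expressed as a single pointer write, so the builder loads the whole vector, patches the selected components, and stores the result back. A rough scalar model of that read-modify-write (assumed 4-wide target and 2-wide source; not code from the patch):

#include <array>
#include <cstddef>

// Illustrative read-modify-write for a swizzled store like "v.zx = e".
void storeThroughSwizzle(std::array<float, 4>& v, const std::array<float, 2>& e,
                         const std::array<int, 2>& swizzle /* {2, 0} for .zx */)
{
    std::array<float, 4> tmp = v;            // load the target vector
    for (std::size_t i = 0; i < e.size(); ++i)
        tmp[swizzle[i]] = e[i];              // insert each written component
    v = tmp;                                 // store the merged vector back
}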
if (accessChain.swizzle.size() > 0) { - Id tempBaseId = createLoad(base); + Id tempBaseId = createLoad(base, spv::NoPrecision); source = createLvalueSwizzle(getTypeId(tempBaseId), tempBaseId, source, accessChain.swizzle); } @@ -2663,7 +2789,8 @@ void Builder::accessChainStore(Id rvalue, spv::MemoryAccessMask memoryAccess, sp } // Comments in header -Id Builder::accessChainLoad(Decoration precision, Decoration nonUniform, Id resultType, spv::MemoryAccessMask memoryAccess, spv::Scope scope, unsigned int alignment) +Id Builder::accessChainLoad(Decoration precision, Decoration nonUniform, Id resultType, + spv::MemoryAccessMask memoryAccess, spv::Scope scope, unsigned int alignment) { Id id; @@ -2687,17 +2814,19 @@ Id Builder::accessChainLoad(Decoration precision, Decoration nonUniform, Id resu if (constant) { id = createCompositeExtract(accessChain.base, swizzleBase, indexes); + setPrecision(id, precision); } else { Id lValue = NoResult; - if (spvVersion >= Spv_1_4) { + if (spvVersion >= Spv_1_4 && isValidInitializer(accessChain.base)) { // make a new function variable for this r-value, using an initializer, // and mark it as NonWritable so that downstream it can be detected as a lookup // table - lValue = createVariable(StorageClassFunction, getTypeId(accessChain.base), "indexable", - accessChain.base); + lValue = createVariable(NoPrecision, StorageClassFunction, getTypeId(accessChain.base), + "indexable", accessChain.base); addDecoration(lValue, DecorationNonWritable); } else { - lValue = createVariable(StorageClassFunction, getTypeId(accessChain.base), "indexable"); + lValue = createVariable(NoPrecision, StorageClassFunction, getTypeId(accessChain.base), + "indexable"); // store into it createStore(accessChain.base, lValue); } @@ -2706,9 +2835,8 @@ Id Builder::accessChainLoad(Decoration precision, Decoration nonUniform, Id resu accessChain.isRValue = false; // load through the access chain - id = createLoad(collapseAccessChain()); + id = createLoad(collapseAccessChain(), precision); } - setPrecision(id, precision); } else id = accessChain.base; // no precision, it was set when this was defined } else { @@ -2721,8 +2849,13 @@ Id Builder::accessChainLoad(Decoration precision, Decoration nonUniform, Id resu } // load through the access chain - id = createLoad(collapseAccessChain(), memoryAccess, scope, alignment); - setPrecision(id, precision); + id = collapseAccessChain(); + // Apply nonuniform both to the access chain and the loaded value. + // Buffer accesses need the access chain decorated, and this is where + // loaded image types get decorated. TODO: This should maybe move to + // createImageTextureFunctionCall. + addDecoration(id, nonUniform); + id = createLoad(id, precision, memoryAccess, scope, alignment); addDecoration(id, nonUniform); } @@ -3075,7 +3208,8 @@ void Builder::dumpSourceInstructions(std::vector& out) const dumpSourceInstructions(iItr->first, *iItr->second, out); } -void Builder::dumpInstructions(std::vector& out, const std::vector >& instructions) const +void Builder::dumpInstructions(std::vector& out, + const std::vector >& instructions) const { for (int i = 0; i < (int)instructions.size(); ++i) { instructions[i]->dump(out); diff --git a/thirdparty/glslang/SPIRV/SpvBuilder.h b/thirdparty/glslang/SPIRV/SpvBuilder.h index 31fee975fcc..077945e77bf 100644 --- a/thirdparty/glslang/SPIRV/SpvBuilder.h +++ b/thirdparty/glslang/SPIRV/SpvBuilder.h @@ -1,7 +1,8 @@ // // Copyright (C) 2014-2015 LunarG, Inc. -// Copyright (C) 2015-2018 Google, Inc. 
+// Copyright (C) 2015-2020 Google, Inc. // Copyright (C) 2017 ARM Limited. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. // @@ -94,6 +95,7 @@ public: const char* file_c_str = str.c_str(); fileString->addStringOperand(file_c_str); strings.push_back(std::unique_ptr(fileString)); + module.mapInstruction(fileString); stringIds[file_c_str] = strId; return strId; } @@ -181,7 +183,9 @@ public: Id makeCooperativeMatrixType(Id component, Id scope, Id rows, Id cols); // accelerationStructureNV type - Id makeAccelerationStructureNVType(); + Id makeAccelerationStructureType(); + // rayQueryEXT type + Id makeRayQueryType(); // For querying about types. Id getTypeId(Id resultId) const { return module.getTypeId(resultId); } @@ -196,7 +200,8 @@ public: Id getContainedTypeId(Id typeId) const; Id getContainedTypeId(Id typeId, int) const; StorageClass getTypeStorageClass(Id typeId) const { return module.getStorageClass(typeId); } - ImageFormat getImageTypeFormat(Id typeId) const { return (ImageFormat)module.getInstruction(typeId)->getImmediateOperand(6); } + ImageFormat getImageTypeFormat(Id typeId) const + { return (ImageFormat)module.getInstruction(typeId)->getImmediateOperand(6); } bool isPointer(Id resultId) const { return isPointerType(getTypeId(resultId)); } bool isScalar(Id resultId) const { return isScalarType(getTypeId(resultId)); } @@ -206,12 +211,17 @@ public: bool isAggregate(Id resultId) const { return isAggregateType(getTypeId(resultId)); } bool isSampledImage(Id resultId) const { return isSampledImageType(getTypeId(resultId)); } - bool isBoolType(Id typeId) { return groupedTypes[OpTypeBool].size() > 0 && typeId == groupedTypes[OpTypeBool].back()->getResultId(); } - bool isIntType(Id typeId) const { return getTypeClass(typeId) == OpTypeInt && module.getInstruction(typeId)->getImmediateOperand(1) != 0; } - bool isUintType(Id typeId) const { return getTypeClass(typeId) == OpTypeInt && module.getInstruction(typeId)->getImmediateOperand(1) == 0; } + bool isBoolType(Id typeId) + { return groupedTypes[OpTypeBool].size() > 0 && typeId == groupedTypes[OpTypeBool].back()->getResultId(); } + bool isIntType(Id typeId) const + { return getTypeClass(typeId) == OpTypeInt && module.getInstruction(typeId)->getImmediateOperand(1) != 0; } + bool isUintType(Id typeId) const + { return getTypeClass(typeId) == OpTypeInt && module.getInstruction(typeId)->getImmediateOperand(1) == 0; } bool isFloatType(Id typeId) const { return getTypeClass(typeId) == OpTypeFloat; } bool isPointerType(Id typeId) const { return getTypeClass(typeId) == OpTypePointer; } - bool isScalarType(Id typeId) const { return getTypeClass(typeId) == OpTypeFloat || getTypeClass(typeId) == OpTypeInt || getTypeClass(typeId) == OpTypeBool; } + bool isScalarType(Id typeId) const + { return getTypeClass(typeId) == OpTypeFloat || getTypeClass(typeId) == OpTypeInt || + getTypeClass(typeId) == OpTypeBool; } bool isVectorType(Id typeId) const { return getTypeClass(typeId) == OpTypeVector; } bool isMatrixType(Id typeId) const { return getTypeClass(typeId) == OpTypeMatrix; } bool isStructType(Id typeId) const { return getTypeClass(typeId) == OpTypeStruct; } @@ -221,7 +231,8 @@ public: #else bool isCooperativeMatrixType(Id typeId)const { return getTypeClass(typeId) == OpTypeCooperativeMatrixNV; } #endif - bool isAggregateType(Id typeId) const { return isArrayType(typeId) || isStructType(typeId) || isCooperativeMatrixType(typeId); } + bool isAggregateType(Id typeId) const + { return 
isArrayType(typeId) || isStructType(typeId) || isCooperativeMatrixType(typeId); } bool isImageType(Id typeId) const { return getTypeClass(typeId) == OpTypeImage; } bool isSamplerType(Id typeId) const { return getTypeClass(typeId) == OpTypeSampler; } bool isSampledImageType(Id typeId) const { return getTypeClass(typeId) == OpTypeSampledImage; } @@ -233,9 +244,17 @@ public: bool isConstant(Id resultId) const { return isConstantOpCode(getOpCode(resultId)); } bool isConstantScalar(Id resultId) const { return getOpCode(resultId) == OpConstant; } bool isSpecConstant(Id resultId) const { return isSpecConstantOpCode(getOpCode(resultId)); } - unsigned int getConstantScalar(Id resultId) const { return module.getInstruction(resultId)->getImmediateOperand(0); } + unsigned int getConstantScalar(Id resultId) const + { return module.getInstruction(resultId)->getImmediateOperand(0); } StorageClass getStorageClass(Id resultId) const { return getTypeStorageClass(getTypeId(resultId)); } + bool isVariableOpCode(Op opcode) const { return opcode == OpVariable; } + bool isVariable(Id resultId) const { return isVariableOpCode(getOpCode(resultId)); } + bool isGlobalStorage(Id resultId) const { return getStorageClass(resultId) != StorageClassFunction; } + bool isGlobalVariable(Id resultId) const { return isVariable(resultId) && isGlobalStorage(resultId); } + // See if a resultId is valid for use as an initializer. + bool isValidInitializer(Id resultId) const { return isConstant(resultId) || isGlobalVariable(resultId); } + int getScalarTypeWidth(Id typeId) const { Id scalarTypeId = getScalarTypeId(typeId); @@ -275,14 +294,22 @@ public: // For making new constants (will return old constant if the requested one was already made). Id makeBoolConstant(bool b, bool specConstant = false); - Id makeInt8Constant(int i, bool specConstant = false) { return makeIntConstant(makeIntType(8), (unsigned)i, specConstant); } - Id makeUint8Constant(unsigned u, bool specConstant = false) { return makeIntConstant(makeUintType(8), u, specConstant); } - Id makeInt16Constant(int i, bool specConstant = false) { return makeIntConstant(makeIntType(16), (unsigned)i, specConstant); } - Id makeUint16Constant(unsigned u, bool specConstant = false) { return makeIntConstant(makeUintType(16), u, specConstant); } - Id makeIntConstant(int i, bool specConstant = false) { return makeIntConstant(makeIntType(32), (unsigned)i, specConstant); } - Id makeUintConstant(unsigned u, bool specConstant = false) { return makeIntConstant(makeUintType(32), u, specConstant); } - Id makeInt64Constant(long long i, bool specConstant = false) { return makeInt64Constant(makeIntType(64), (unsigned long long)i, specConstant); } - Id makeUint64Constant(unsigned long long u, bool specConstant = false) { return makeInt64Constant(makeUintType(64), u, specConstant); } + Id makeInt8Constant(int i, bool specConstant = false) + { return makeIntConstant(makeIntType(8), (unsigned)i, specConstant); } + Id makeUint8Constant(unsigned u, bool specConstant = false) + { return makeIntConstant(makeUintType(8), u, specConstant); } + Id makeInt16Constant(int i, bool specConstant = false) + { return makeIntConstant(makeIntType(16), (unsigned)i, specConstant); } + Id makeUint16Constant(unsigned u, bool specConstant = false) + { return makeIntConstant(makeUintType(16), u, specConstant); } + Id makeIntConstant(int i, bool specConstant = false) + { return makeIntConstant(makeIntType(32), (unsigned)i, specConstant); } + Id makeUintConstant(unsigned u, bool specConstant = false) + { return 
makeIntConstant(makeUintType(32), u, specConstant); } + Id makeInt64Constant(long long i, bool specConstant = false) + { return makeInt64Constant(makeIntType(64), (unsigned long long)i, specConstant); } + Id makeUint64Constant(unsigned long long u, bool specConstant = false) + { return makeInt64Constant(makeUintType(64), u, specConstant); } Id makeFloatConstant(float f, bool specConstant = false); Id makeDoubleConstant(double d, bool specConstant = false); Id makeFloat16Constant(float f16, bool specConstant = false); @@ -294,13 +321,20 @@ public: // Methods for adding information outside the CFG. Instruction* addEntryPoint(ExecutionModel, Function*, const char* name); void addExecutionMode(Function*, ExecutionMode mode, int value1 = -1, int value2 = -1, int value3 = -1); + void addExecutionMode(Function*, ExecutionMode mode, const std::vector& literals); + void addExecutionModeId(Function*, ExecutionMode mode, const std::vector& operandIds); void addName(Id, const char* name); void addMemberName(Id, int member, const char* name); void addDecoration(Id, Decoration, int num = -1); void addDecoration(Id, Decoration, const char*); + void addDecoration(Id, Decoration, const std::vector& literals); + void addDecoration(Id, Decoration, const std::vector& strings); void addDecorationId(Id id, Decoration, Id idDecoration); + void addDecorationId(Id id, Decoration, const std::vector& operandIds); void addMemberDecoration(Id, unsigned int member, Decoration, int num = -1); void addMemberDecoration(Id, unsigned int member, Decoration, const char*); + void addMemberDecoration(Id, unsigned int member, Decoration, const std::vector& literals); + void addMemberDecoration(Id, unsigned int member, Decoration, const std::vector& strings); // At the end of what block do the next create*() instructions go? void setBuildPoint(Block* bp) { buildPoint = bp; } @@ -313,8 +347,8 @@ public: // Make a shader-style function, and create its entry block if entry is non-zero. // Return the function, pass back the entry. // The returned pointer is only valid for the lifetime of this builder. - Function* makeFunctionEntry(Decoration precision, Id returnType, const char* name, const std::vector& paramTypes, - const std::vector>& precisions, Block **entry = 0); + Function* makeFunctionEntry(Decoration precision, Id returnType, const char* name, + const std::vector& paramTypes, const std::vector>& precisions, Block **entry = 0); // Create a return. An 'implicit' return is one not appearing in the source // code. In the case of an implicit return, no post-return block is inserted. @@ -327,16 +361,20 @@ public: void makeDiscard(); // Create a global or function local or IO variable. - Id createVariable(StorageClass, Id type, const char* name = 0, Id initializer = NoResult); + Id createVariable(Decoration precision, StorageClass, Id type, const char* name = nullptr, + Id initializer = NoResult); // Create an intermediate with an undefined value. 
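A possible call site for the reworked createVariable() declared just above (sketch only; 'builder' and 'floatTypeId' are assumed to exist): the precision decoration now travels with the creation call instead of a separate setPrecision() afterwards.

#include "SpvBuilder.h"

// Illustrative: make a relaxed-precision function-local temporary.
spv::Id makeMediumpTemp(spv::Builder& builder, spv::Id floatTypeId)
{
    return builder.createVariable(spv::DecorationRelaxedPrecision,
                                  spv::StorageClassFunction, floatTypeId, "mediump_tmp");
}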
Id createUndefined(Id type); // Store into an Id and return the l-value - void createStore(Id rValue, Id lValue, spv::MemoryAccessMask memoryAccess = spv::MemoryAccessMaskNone, spv::Scope scope = spv::ScopeMax, unsigned int alignment = 0); + void createStore(Id rValue, Id lValue, spv::MemoryAccessMask memoryAccess = spv::MemoryAccessMaskNone, + spv::Scope scope = spv::ScopeMax, unsigned int alignment = 0); // Load from an Id and return it - Id createLoad(Id lValue, spv::MemoryAccessMask memoryAccess = spv::MemoryAccessMaskNone, spv::Scope scope = spv::ScopeMax, unsigned int alignment = 0); + Id createLoad(Id lValue, spv::Decoration precision, + spv::MemoryAccessMask memoryAccess = spv::MemoryAccessMaskNone, + spv::Scope scope = spv::ScopeMax, unsigned int alignment = 0); // Create an OpAccessChain instruction Id createAccessChain(StorageClass, Id base, const std::vector& offsets); @@ -495,7 +533,7 @@ public: // recursion stack can hold the memory for it. // void makeSwitch(Id condition, unsigned int control, int numSegments, const std::vector& caseValues, - const std::vector& valueToSegment, int defaultSegment, std::vector& segmentBB); // return argument + const std::vector& valueToSegment, int defaultSegment, std::vector& segmentBB); // Add a branch to the innermost switch's merge block. void addSwitchBreak(); @@ -512,7 +550,7 @@ public: Block &head, &body, &merge, &continue_target; private: LoopBlocks(); - LoopBlocks& operator=(const LoopBlocks&); + LoopBlocks& operator=(const LoopBlocks&) = delete; }; // Start a new loop and prepare the builder to generate code for it. Until @@ -569,10 +607,13 @@ public: std::vector indexChain; Id instr; // cache the instruction that generates this access chain std::vector swizzle; // each std::vector element selects the next GLSL component number - Id component; // a dynamic component index, can coexist with a swizzle, done after the swizzle, NoResult if not present - Id preSwizzleBaseType; // dereferenced type, before swizzle or component is applied; NoType unless a swizzle or component is present + Id component; // a dynamic component index, can coexist with a swizzle, + // done after the swizzle, NoResult if not present + Id preSwizzleBaseType; // dereferenced type, before swizzle or component is applied; + // NoType unless a swizzle or component is present bool isRValue; // true if 'base' is an r-value, otherwise, base is an l-value - unsigned int alignment; // bitwise OR of alignment values passed in. Accumulates worst alignment. Only tracks base and (optional) component selection alignment. + unsigned int alignment; // bitwise OR of alignment values passed in. Accumulates worst alignment. + // Only tracks base and (optional) component selection alignment. // Accumulate whether anything in the chain of structures has coherent decorations. 
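Matching sketch for the createStore()/createLoad() pair declared above (illustrative; both ids are assumed to be pointer-typed): createLoad() now carries the precision to stamp onto its result, while memory access, scope and alignment keep their defaults here.

// Not part of the patch: copy a value from one pointer to another.
void copyValue(spv::Builder& builder, spv::Id dstPtr, spv::Id srcPtr)
{
    spv::Id value = builder.createLoad(srcPtr, spv::NoPrecision);  // emits OpLoad
    builder.createStore(value, dstPtr);                            // emits OpStore
}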
struct CoherentFlags { @@ -583,12 +624,17 @@ public: CoherentFlags operator |=(const CoherentFlags &other) { return *this; } #else bool isVolatile() const { return volatil; } + bool anyCoherent() const { + return coherent || devicecoherent || queuefamilycoherent || workgroupcoherent || + subgroupcoherent || shadercallcoherent; + } unsigned coherent : 1; unsigned devicecoherent : 1; unsigned queuefamilycoherent : 1; unsigned workgroupcoherent : 1; unsigned subgroupcoherent : 1; + unsigned shadercallcoherent : 1; unsigned nonprivate : 1; unsigned volatil : 1; unsigned isImage : 1; @@ -599,6 +645,7 @@ public: queuefamilycoherent = 0; workgroupcoherent = 0; subgroupcoherent = 0; + shadercallcoherent = 0; nonprivate = 0; volatil = 0; isImage = 0; @@ -610,6 +657,7 @@ public: queuefamilycoherent |= other.queuefamilycoherent; workgroupcoherent |= other.workgroupcoherent; subgroupcoherent |= other.subgroupcoherent; + shadercallcoherent |= other.shadercallcoherent; nonprivate |= other.nonprivate; volatil |= other.volatil; isImage |= other.isImage; @@ -655,11 +703,13 @@ public: } // push new swizzle onto the end of any existing swizzle, merging into a single swizzle - void accessChainPushSwizzle(std::vector& swizzle, Id preSwizzleBaseType, AccessChain::CoherentFlags coherentFlags, unsigned int alignment); + void accessChainPushSwizzle(std::vector& swizzle, Id preSwizzleBaseType, + AccessChain::CoherentFlags coherentFlags, unsigned int alignment); // push a dynamic component selection onto the access chain, only applicable with a // non-trivial swizzle or no swizzle - void accessChainPushComponent(Id component, Id preSwizzleBaseType, AccessChain::CoherentFlags coherentFlags, unsigned int alignment) + void accessChainPushComponent(Id component, Id preSwizzleBaseType, AccessChain::CoherentFlags coherentFlags, + unsigned int alignment) { if (accessChain.swizzle.size() != 1) { accessChain.component = component; @@ -671,10 +721,18 @@ public: } // use accessChain and swizzle to store value - void accessChainStore(Id rvalue, spv::MemoryAccessMask memoryAccess = spv::MemoryAccessMaskNone, spv::Scope scope = spv::ScopeMax, unsigned int alignment = 0); + void accessChainStore(Id rvalue, spv::MemoryAccessMask memoryAccess = spv::MemoryAccessMaskNone, + spv::Scope scope = spv::ScopeMax, unsigned int alignment = 0); // use accessChain and swizzle to load an r-value - Id accessChainLoad(Decoration precision, Decoration nonUniform, Id ResultType, spv::MemoryAccessMask memoryAccess = spv::MemoryAccessMaskNone, spv::Scope scope = spv::ScopeMax, unsigned int alignment = 0); + Id accessChainLoad(Decoration precision, Decoration nonUniform, Id ResultType, + spv::MemoryAccessMask memoryAccess = spv::MemoryAccessMaskNone, spv::Scope scope = spv::ScopeMax, + unsigned int alignment = 0); + + // Return whether or not the access chain can be represented in SPIR-V + // as an l-value. + // E.g., a[3].yx cannot be, while a[3].y and a[3].y[x] can be. + bool isSpvLvalue() const { return accessChain.swizzle.size() <= 1; } // get the direct pointer for an l-value Id accessChainGetLValue(); @@ -703,7 +761,8 @@ public: void createBranch(Block* block); void createConditionalBranch(Id condition, Block* thenBlock, Block* elseBlock); - void createLoopMerge(Block* mergeBlock, Block* continueBlock, unsigned int control, const std::vector& operands); + void createLoopMerge(Block* mergeBlock, Block* continueBlock, unsigned int control, + const std::vector& operands); // Sets to generate opcode for specialization constants. 
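Looking back at the CoherentFlags changes in this hunk: the new shadercallcoherent bit (for the ray-tracing storage classes) folds into the same merge as the other qualifiers, and anyCoherent() gives a single test for the result. A small illustration, assuming a non-GLSLANG_WEB build; everything outside the CoherentFlags members is made up.

// Illustrative only; mirrors how flags accumulate while walking an access chain.
bool chainNeedsCoherence()
{
    spv::Builder::AccessChain::CoherentFlags chain, member;
    chain.clear();
    member.clear();
    member.shadercallcoherent = 1;   // e.g. picked up from a shadercallcoherent-qualified member
    chain |= member;                 // merged into the running access-chain flags
    return chain.anyCoherent();      // true: a coherent decoration will be needed
}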
void setToSpecConstCodeGenMode() { generatingOpCodeForSpecConst = true; } @@ -729,7 +788,8 @@ public: void dumpSourceInstructions(const spv::Id fileId, const std::string& text, std::vector&) const; void dumpInstructions(std::vector&, const std::vector >&) const; void dumpModuleProcesses(std::vector&) const; - spv::MemoryAccessMask sanitizeMemoryAccessForStorageClass(spv::MemoryAccessMask memoryAccess, StorageClass sc) const; + spv::MemoryAccessMask sanitizeMemoryAccessForStorageClass(spv::MemoryAccessMask memoryAccess, StorageClass sc) + const; unsigned int spvVersion; // the version of SPIR-V to emit in the header SourceLanguage source; @@ -764,10 +824,14 @@ public: std::vector > externals; std::vector > functions; - // not output, internally used for quick & dirty canonical (unique) creation - std::unordered_map> groupedConstants; // map type opcodes to constant inst. - std::unordered_map> groupedStructConstants; // map struct-id to constant instructions - std::unordered_map> groupedTypes; // map type opcodes to type instructions + // not output, internally used for quick & dirty canonical (unique) creation + + // map type opcodes to constant inst. + std::unordered_map> groupedConstants; + // map struct-id to constant instructions + std::unordered_map> groupedStructConstants; + // map type opcodes to type instructions + std::unordered_map> groupedTypes; // stack of switches std::stack switchMerges; diff --git a/thirdparty/glslang/SPIRV/SpvTools.cpp b/thirdparty/glslang/SPIRV/SpvTools.cpp index 97bd4e74272..112ac33c5cc 100644 --- a/thirdparty/glslang/SPIRV/SpvTools.cpp +++ b/thirdparty/glslang/SPIRV/SpvTools.cpp @@ -1,6 +1,6 @@ // // Copyright (C) 2014-2016 LunarG, Inc. -// Copyright (C) 2018 Google, Inc. +// Copyright (C) 2018-2020 Google, Inc. // // All rights reserved. // @@ -80,6 +80,39 @@ spv_target_env MapToSpirvToolsEnv(const SpvVersion& spvVersion, spv::SpvBuildLog return spv_target_env::SPV_ENV_UNIVERSAL_1_0; } +// Callback passed to spvtools::Optimizer::SetMessageConsumer +void OptimizerMesssageConsumer(spv_message_level_t level, const char *source, + const spv_position_t &position, const char *message) +{ + auto &out = std::cerr; + switch (level) + { + case SPV_MSG_FATAL: + case SPV_MSG_INTERNAL_ERROR: + case SPV_MSG_ERROR: + out << "error: "; + break; + case SPV_MSG_WARNING: + out << "warning: "; + break; + case SPV_MSG_INFO: + case SPV_MSG_DEBUG: + out << "info: "; + break; + default: + break; + } + if (source) + { + out << source << ":"; + } + out << position.line << ":" << position.column << ":" << position.index << ":"; + if (message) + { + out << " " << message; + } + out << std::endl; +} // Use the SPIRV-Tools disassembler to print SPIR-V. void SpirvToolsDisassemble(std::ostream& out, const std::vector& spirv) @@ -128,45 +161,14 @@ void SpirvToolsValidate(const glslang::TIntermediate& intermediate, std::vector< spvContextDestroy(context); } -// Apply the SPIRV-Tools optimizer to generated SPIR-V, for the purpose of -// legalizing HLSL SPIR-V. -void SpirvToolsLegalize(const glslang::TIntermediate&, std::vector& spirv, - spv::SpvBuildLogger*, const SpvOptions* options) +// Apply the SPIRV-Tools optimizer to generated SPIR-V. HLSL SPIR-V is legalized in the process. 
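For orientation, the consumer factored out above formats SPIRV-Tools diagnostics as severity, source, position and text on std::cerr. A hypothetical call from within SpvTools.cpp (the values are made up) would print "warning: input.spv:7:1:0: something looks off".

// Illustrative only; exercises the consumer defined above directly.
void demoOptimizerMessage()
{
    spv_position_t where = { 7, 1, 0 };   // line, column, byte index
    OptimizerMesssageConsumer(SPV_MSG_WARNING, "input.spv", where, "something looks off");
}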
+void SpirvToolsTransform(const glslang::TIntermediate& intermediate, std::vector& spirv, + spv::SpvBuildLogger* logger, const SpvOptions* options) { - spv_target_env target_env = SPV_ENV_UNIVERSAL_1_2; + spv_target_env target_env = MapToSpirvToolsEnv(intermediate.getSpv(), logger); spvtools::Optimizer optimizer(target_env); - optimizer.SetMessageConsumer( - [](spv_message_level_t level, const char *source, const spv_position_t &position, const char *message) { - auto &out = std::cerr; - switch (level) - { - case SPV_MSG_FATAL: - case SPV_MSG_INTERNAL_ERROR: - case SPV_MSG_ERROR: - out << "error: "; - break; - case SPV_MSG_WARNING: - out << "warning: "; - break; - case SPV_MSG_INFO: - case SPV_MSG_DEBUG: - out << "info: "; - break; - default: - break; - } - if (source) - { - out << source << ":"; - } - out << position.line << ":" << position.column << ":" << position.index << ":"; - if (message) - { - out << " " << message; - } - out << std::endl; - }); + optimizer.SetMessageConsumer(OptimizerMesssageConsumer); // If debug (specifically source line info) is being generated, propagate // line information into all SPIR-V instructions. This avoids loss of @@ -175,6 +177,9 @@ void SpirvToolsLegalize(const glslang::TIntermediate&, std::vector if (options->generateDebugInfo) { optimizer.RegisterPass(spvtools::CreatePropagateLineInfoPass()); } + else if (options->stripDebugInfo) { + optimizer.RegisterPass(spvtools::CreateStripDebugInfoPass()); + } optimizer.RegisterPass(spvtools::CreateWrapOpKillPass()); optimizer.RegisterPass(spvtools::CreateDeadBranchElimPass()); optimizer.RegisterPass(spvtools::CreateMergeReturnPass()); @@ -207,7 +212,27 @@ void SpirvToolsLegalize(const glslang::TIntermediate&, std::vector } spvtools::OptimizerOptions spvOptOptions; - spvOptOptions.set_run_validator(false); // The validator may run as a seperate step later on + optimizer.SetTargetEnv(MapToSpirvToolsEnv(intermediate.getSpv(), logger)); + spvOptOptions.set_run_validator(false); // The validator may run as a separate step later on + optimizer.Run(spirv.data(), spirv.size(), &spirv, spvOptOptions); +} + +// Apply the SPIRV-Tools optimizer to strip debug info from SPIR-V. This is implicitly done by +// SpirvToolsTransform if spvOptions->stripDebugInfo is set, but can be called separately if +// optimization is disabled. 
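Taken together with the GlslangToSpv() change earlier in this patch, the two entry points divide the work roughly as below (caller-side sketch; apart from the two SpirvTools* functions and the SpvOptions fields, the names are illustrative).

#include <vector>
#include "SpvTools.h"

// Illustrative dispatch: full optimizer when legalizing or shrinking, otherwise a
// dedicated pass run that only strips debug instructions.
void postProcessSpirv(const glslang::TIntermediate& intermediate, std::vector<unsigned int>& spirv,
                      spv::SpvBuildLogger* logger, const glslang::SpvOptions* options, bool fromHlsl)
{
    if ((fromHlsl || options->optimizeSize) && !options->disableOptimizer)
        glslang::SpirvToolsTransform(intermediate, spirv, logger, options);
    else if (options->stripDebugInfo)
        glslang::SpirvToolsStripDebugInfo(intermediate, spirv, logger);
}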
+void SpirvToolsStripDebugInfo(const glslang::TIntermediate& intermediate, + std::vector& spirv, spv::SpvBuildLogger* logger) +{ + spv_target_env target_env = MapToSpirvToolsEnv(intermediate.getSpv(), logger); + + spvtools::Optimizer optimizer(target_env); + optimizer.SetMessageConsumer(OptimizerMesssageConsumer); + + optimizer.RegisterPass(spvtools::CreateStripDebugInfoPass()); + + spvtools::OptimizerOptions spvOptOptions; + optimizer.SetTargetEnv(MapToSpirvToolsEnv(intermediate.getSpv(), logger)); + spvOptOptions.set_run_validator(false); // The validator may run as a separate step later on optimizer.Run(spirv.data(), spirv.size(), &spirv, spvOptOptions); } diff --git a/thirdparty/glslang/SPIRV/SpvTools.h b/thirdparty/glslang/SPIRV/SpvTools.h index 59c914da0b7..7779dfa779f 100644 --- a/thirdparty/glslang/SPIRV/SpvTools.h +++ b/thirdparty/glslang/SPIRV/SpvTools.h @@ -52,9 +52,10 @@ namespace glslang { struct SpvOptions { - SpvOptions() : generateDebugInfo(false), disableOptimizer(true), + SpvOptions() : generateDebugInfo(false), stripDebugInfo(false), disableOptimizer(true), optimizeSize(false), disassemble(false), validate(false) { } bool generateDebugInfo; + bool stripDebugInfo; bool disableOptimizer; bool optimizeSize; bool disassemble; @@ -70,10 +71,15 @@ void SpirvToolsDisassemble(std::ostream& out, const std::vector& s void SpirvToolsValidate(const glslang::TIntermediate& intermediate, std::vector& spirv, spv::SpvBuildLogger*, bool prelegalization); -// Apply the SPIRV-Tools optimizer to generated SPIR-V, for the purpose of -// legalizing HLSL SPIR-V. -void SpirvToolsLegalize(const glslang::TIntermediate& intermediate, std::vector& spirv, - spv::SpvBuildLogger*, const SpvOptions*); +// Apply the SPIRV-Tools optimizer to generated SPIR-V. HLSL SPIR-V is legalized in the process. +void SpirvToolsTransform(const glslang::TIntermediate& intermediate, std::vector& spirv, + spv::SpvBuildLogger*, const SpvOptions*); + +// Apply the SPIRV-Tools optimizer to strip debug info from SPIR-V. This is implicitly done by +// SpirvToolsTransform if spvOptions->stripDebugInfo is set, but can be called separately if +// optimization is disabled. +void SpirvToolsStripDebugInfo(const glslang::TIntermediate& intermediate, + std::vector& spirv, spv::SpvBuildLogger*); #endif diff --git a/thirdparty/glslang/SPIRV/disassemble.cpp b/thirdparty/glslang/SPIRV/disassemble.cpp index 930e7994936..a95ded49d23 100644 --- a/thirdparty/glslang/SPIRV/disassemble.cpp +++ b/thirdparty/glslang/SPIRV/disassemble.cpp @@ -75,6 +75,7 @@ enum ExtInstSet { GLSLextAMDInst, GLSLextNVInst, OpenCLExtInst, + NonSemanticDebugPrintfExtInst, }; // Container class for a single instance of a SPIR-V stream, with methods for disassembly. 
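On the consumer side of the new SpvOptions field above, a front end that wants lean binaries without running the optimizer could configure it as below. This is a hedged sketch: the stage, program object and flag values are assumptions; only the SpvOptions members and the GlslangToSpv() entry point come from the headers touched by this patch.

#include <vector>
#include "glslang/Public/ShaderLang.h"
#include "SPIRV/GlslangToSpv.h"

// Illustrative: emit SPIR-V with debug instructions stripped even though the
// optimizer itself stays disabled. Assumes 'program' has a linked fragment stage.
void emitStrippedSpirv(glslang::TProgram& program, std::vector<unsigned int>& spirv)
{
    glslang::SpvOptions spvOptions;
    spvOptions.generateDebugInfo = false;
    spvOptions.stripDebugInfo    = true;   // new flag wired through SpvTools in this patch
    spvOptions.disableOptimizer  = true;
    spvOptions.validate          = true;

    spv::SpvBuildLogger logger;
    glslang::GlslangToSpv(*program.getIntermediate(EShLangFragment), spirv, &logger, &spvOptions);
}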
@@ -480,8 +481,12 @@ void SpirvStream::disassembleInstruction(Id resultId, Id /*typeId*/, Op opCode, if (opCode == OpExtInst) { ExtInstSet extInstSet = GLSL450Inst; const char* name = idDescriptor[stream[word - 2]].c_str(); - if (0 == memcmp("OpenCL", name, 6)) { + if (strcmp("OpenCL.std", name) == 0) { extInstSet = OpenCLExtInst; + } else if (strcmp("OpenCL.DebugInfo.100", name) == 0) { + extInstSet = OpenCLExtInst; + } else if (strcmp("NonSemantic.DebugPrintf", name) == 0) { + extInstSet = NonSemanticDebugPrintfExtInst; } else if (strcmp(spv::E_SPV_AMD_shader_ballot, name) == 0 || strcmp(spv::E_SPV_AMD_shader_trinary_minmax, name) == 0 || strcmp(spv::E_SPV_AMD_shader_explicit_vertex_parameter, name) == 0 || @@ -505,6 +510,8 @@ void SpirvStream::disassembleInstruction(Id resultId, Id /*typeId*/, Op opCode, } else if (extInstSet == GLSLextNVInst) { out << "(" << GLSLextNVGetDebugNames(name, entrypoint) << ")"; + } else if (extInstSet == NonSemanticDebugPrintfExtInst) { + out << "(DebugPrintf)"; } } break; @@ -512,6 +519,10 @@ void SpirvStream::disassembleInstruction(Id resultId, Id /*typeId*/, Op opCode, case OperandLiteralString: numOperands -= disassembleString(); break; + case OperandVariableLiteralStrings: + while (numOperands > 0) + numOperands -= disassembleString(); + return; case OperandMemoryAccess: outputMask(OperandMemoryAccess, stream[word++]); --numOperands; diff --git a/thirdparty/glslang/SPIRV/doc.cpp b/thirdparty/glslang/SPIRV/doc.cpp index bee5c79729d..1d052f8c298 100644 --- a/thirdparty/glslang/SPIRV/doc.cpp +++ b/thirdparty/glslang/SPIRV/doc.cpp @@ -1,5 +1,6 @@ // // Copyright (C) 2014-2015 LunarG, Inc. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. // @@ -99,12 +100,12 @@ const char* ExecutionModelString(int model) default: return "Bad"; - case ExecutionModelRayGenerationNV: return "RayGenerationNV"; - case ExecutionModelIntersectionNV: return "IntersectionNV"; - case ExecutionModelAnyHitNV: return "AnyHitNV"; - case ExecutionModelClosestHitNV: return "ClosestHitNV"; - case ExecutionModelMissNV: return "MissNV"; - case ExecutionModelCallableNV: return "CallableNV"; + case ExecutionModelRayGenerationKHR: return "RayGenerationKHR"; + case ExecutionModelIntersectionKHR: return "IntersectionKHR"; + case ExecutionModelAnyHitKHR: return "AnyHitKHR"; + case ExecutionModelClosestHitKHR: return "ClosestHitKHR"; + case ExecutionModelMissKHR: return "MissKHR"; + case ExecutionModelCallableKHR: return "CallableKHR"; } } @@ -133,7 +134,7 @@ const char* MemoryString(int mem) } } -const int ExecutionModeCeiling = 33; +const int ExecutionModeCeiling = 40; const char* ExecutionModeString(int mode) { @@ -172,7 +173,21 @@ const char* ExecutionModeString(int mode) case 31: return "ContractionOff"; case 32: return "Bad"; - case 4446: return "PostDepthCoverage"; + case ExecutionModeInitializer: return "Initializer"; + case ExecutionModeFinalizer: return "Finalizer"; + case ExecutionModeSubgroupSize: return "SubgroupSize"; + case ExecutionModeSubgroupsPerWorkgroup: return "SubgroupsPerWorkgroup"; + case ExecutionModeSubgroupsPerWorkgroupId: return "SubgroupsPerWorkgroupId"; + case ExecutionModeLocalSizeId: return "LocalSizeId"; + case ExecutionModeLocalSizeHintId: return "LocalSizeHintId"; + + case ExecutionModePostDepthCoverage: return "PostDepthCoverage"; + case ExecutionModeDenormPreserve: return "DenormPreserve"; + case ExecutionModeDenormFlushToZero: return "DenormFlushToZero"; + case 
ExecutionModeSignedZeroInfNanPreserve: return "SignedZeroInfNanPreserve"; + case ExecutionModeRoundingModeRTE: return "RoundingModeRTE"; + case ExecutionModeRoundingModeRTZ: return "RoundingModeRTZ"; + case ExecutionModeStencilRefReplacingEXT: return "StencilRefReplacingEXT"; case ExecutionModeOutputLinesNV: return "OutputLinesNV"; case ExecutionModeOutputPrimitivesNV: return "OutputPrimitivesNV"; @@ -187,6 +202,11 @@ const char* ExecutionModeString(int mode) case ExecutionModeShadingRateInterlockOrderedEXT: return "ShadingRateInterlockOrderedEXT"; case ExecutionModeShadingRateInterlockUnorderedEXT: return "ShadingRateInterlockUnorderedEXT"; + case ExecutionModeMaxWorkgroupSizeINTEL: return "MaxWorkgroupSizeINTEL"; + case ExecutionModeMaxWorkDimINTEL: return "MaxWorkDimINTEL"; + case ExecutionModeNoGlobalOffsetINTEL: return "NoGlobalOffsetINTEL"; + case ExecutionModeNumSIMDWorkitemsINTEL: return "NumSIMDWorkitemsINTEL"; + case ExecutionModeCeiling: default: return "Bad"; } @@ -209,12 +229,12 @@ const char* StorageClassString(int StorageClass) case 11: return "Image"; case 12: return "StorageBuffer"; - case StorageClassRayPayloadNV: return "RayPayloadNV"; - case StorageClassHitAttributeNV: return "HitAttributeNV"; - case StorageClassIncomingRayPayloadNV: return "IncomingRayPayloadNV"; - case StorageClassShaderRecordBufferNV: return "ShaderRecordBufferNV"; - case StorageClassCallableDataNV: return "CallableDataNV"; - case StorageClassIncomingCallableDataNV: return "IncomingCallableDataNV"; + case StorageClassRayPayloadKHR: return "RayPayloadKHR"; + case StorageClassHitAttributeKHR: return "HitAttributeKHR"; + case StorageClassIncomingRayPayloadKHR: return "IncomingRayPayloadKHR"; + case StorageClassShaderRecordBufferKHR: return "ShaderRecordBufferKHR"; + case StorageClassCallableDataKHR: return "CallableDataKHR"; + case StorageClassIncomingCallableDataKHR: return "IncomingCallableDataKHR"; case StorageClassPhysicalStorageBufferEXT: return "PhysicalStorageBufferEXT"; @@ -361,32 +381,33 @@ const char* BuiltInString(int builtIn) case 4996: return "BaryCoordSmoothCentroidAMD"; case 4997: return "BaryCoordSmoothSampleAMD"; case 4998: return "BaryCoordPullModelAMD"; - case BuiltInLaunchIdNV: return "LaunchIdNV"; - case BuiltInLaunchSizeNV: return "LaunchSizeNV"; - case BuiltInWorldRayOriginNV: return "WorldRayOriginNV"; - case BuiltInWorldRayDirectionNV: return "WorldRayDirectionNV"; - case BuiltInObjectRayOriginNV: return "ObjectRayOriginNV"; - case BuiltInObjectRayDirectionNV: return "ObjectRayDirectionNV"; - case BuiltInRayTminNV: return "RayTminNV"; - case BuiltInRayTmaxNV: return "RayTmaxNV"; - case BuiltInInstanceCustomIndexNV: return "InstanceCustomIndexNV"; - case BuiltInObjectToWorldNV: return "ObjectToWorldNV"; - case BuiltInWorldToObjectNV: return "WorldToObjectNV"; - case BuiltInHitTNV: return "HitTNV"; - case BuiltInHitKindNV: return "HitKindNV"; - case BuiltInIncomingRayFlagsNV: return "IncomingRayFlagsNV"; - case BuiltInViewportMaskNV: return "ViewportMaskNV"; - case BuiltInSecondaryPositionNV: return "SecondaryPositionNV"; - case BuiltInSecondaryViewportMaskNV: return "SecondaryViewportMaskNV"; - case BuiltInPositionPerViewNV: return "PositionPerViewNV"; - case BuiltInViewportMaskPerViewNV: return "ViewportMaskPerViewNV"; + case BuiltInLaunchIdKHR: return "LaunchIdKHR"; + case BuiltInLaunchSizeKHR: return "LaunchSizeKHR"; + case BuiltInWorldRayOriginKHR: return "WorldRayOriginKHR"; + case BuiltInWorldRayDirectionKHR: return "WorldRayDirectionKHR"; + case BuiltInObjectRayOriginKHR: 
return "ObjectRayOriginKHR"; + case BuiltInObjectRayDirectionKHR: return "ObjectRayDirectionKHR"; + case BuiltInRayTminKHR: return "RayTminKHR"; + case BuiltInRayTmaxKHR: return "RayTmaxKHR"; + case BuiltInInstanceCustomIndexKHR: return "InstanceCustomIndexKHR"; + case BuiltInRayGeometryIndexKHR: return "RayGeometryIndexKHR"; + case BuiltInObjectToWorldKHR: return "ObjectToWorldKHR"; + case BuiltInWorldToObjectKHR: return "WorldToObjectKHR"; + case BuiltInHitTKHR: return "HitTKHR"; + case BuiltInHitKindKHR: return "HitKindKHR"; + case BuiltInIncomingRayFlagsKHR: return "IncomingRayFlagsKHR"; + case BuiltInViewportMaskNV: return "ViewportMaskNV"; + case BuiltInSecondaryPositionNV: return "SecondaryPositionNV"; + case BuiltInSecondaryViewportMaskNV: return "SecondaryViewportMaskNV"; + case BuiltInPositionPerViewNV: return "PositionPerViewNV"; + case BuiltInViewportMaskPerViewNV: return "ViewportMaskPerViewNV"; // case BuiltInFragmentSizeNV: return "FragmentSizeNV"; // superseded by BuiltInFragSizeEXT // case BuiltInInvocationsPerPixelNV: return "InvocationsPerPixelNV"; // superseded by BuiltInFragInvocationCountEXT - case BuiltInBaryCoordNV: return "BaryCoordNV"; - case BuiltInBaryCoordNoPerspNV: return "BaryCoordNoPerspNV"; + case BuiltInBaryCoordNV: return "BaryCoordNV"; + case BuiltInBaryCoordNoPerspNV: return "BaryCoordNoPerspNV"; - case BuiltInFragSizeEXT: return "FragSizeEXT"; - case BuiltInFragInvocationCountEXT: return "FragInvocationCountEXT"; + case BuiltInFragSizeEXT: return "FragSizeEXT"; + case BuiltInFragInvocationCountEXT: return "FragInvocationCountEXT"; case 5264: return "FullyCoveredEXT"; @@ -890,6 +911,9 @@ const char* CapabilityString(int info) case CapabilityPerViewAttributesNV: return "PerViewAttributesNV"; case CapabilityGroupNonUniformPartitionedNV: return "GroupNonUniformPartitionedNV"; case CapabilityRayTracingNV: return "RayTracingNV"; + case CapabilityRayTracingProvisionalKHR: return "RayTracingProvisionalKHR"; + case CapabilityRayQueryProvisionalKHR: return "RayQueryProvisionalKHR"; + case CapabilityRayTraversalPrimitiveCullingProvisionalKHR: return "RayTraversalPrimitiveCullingProvisionalKHR"; case CapabilityComputeDerivativeGroupQuadsNV: return "ComputeDerivativeGroupQuadsNV"; case CapabilityComputeDerivativeGroupLinearNV: return "ComputeDerivativeGroupLinearNV"; case CapabilityFragmentBarycentricNV: return "FragmentBarycentricNV"; @@ -933,6 +957,9 @@ const char* CapabilityString(int info) case CapabilityIntegerFunctions2INTEL: return "CapabilityIntegerFunctions2INTEL"; + case CapabilityAtomicFloat32AddEXT: return "AtomicFloat32AddEXT"; + case CapabilityAtomicFloat64AddEXT: return "AtomicFloat64AddEXT"; + default: return "Bad"; } } @@ -1264,6 +1291,7 @@ const char* OpcodeString(int op) case 320: return "OpImageSparseRead"; case OpModuleProcessed: return "OpModuleProcessed"; + case OpExecutionModeId: return "OpExecutionModeId"; case OpDecorateId: return "OpDecorateId"; case 333: return "OpGroupNonUniformElect"; @@ -1308,6 +1336,8 @@ const char* OpcodeString(int op) case 4430: return "OpSubgroupAllEqualKHR"; case 4432: return "OpSubgroupReadInvocationKHR"; + case OpAtomicFAddEXT: return "OpAtomicFAddEXT"; + case 5000: return "OpGroupIAddNonUniformAMD"; case 5001: return "OpGroupFAddNonUniformAMD"; case 5002: return "OpGroupFMinNonUniformAMD"; @@ -1326,15 +1356,40 @@ const char* OpcodeString(int op) case OpMemberDecorateStringGOOGLE: return "OpMemberDecorateStringGOOGLE"; case OpGroupNonUniformPartitionNV: return "OpGroupNonUniformPartitionNV"; - case 
OpReportIntersectionNV: return "OpReportIntersectionNV"; - case OpIgnoreIntersectionNV: return "OpIgnoreIntersectionNV"; - case OpTerminateRayNV: return "OpTerminateRayNV"; - case OpTraceNV: return "OpTraceNV"; - case OpTypeAccelerationStructureNV: return "OpTypeAccelerationStructureNV"; - case OpExecuteCallableNV: return "OpExecuteCallableNV"; + case OpReportIntersectionKHR: return "OpReportIntersectionKHR"; + case OpIgnoreIntersectionKHR: return "OpIgnoreIntersectionKHR"; + case OpTerminateRayKHR: return "OpTerminateRayKHR"; + case OpTraceRayKHR: return "OpTraceRayKHR"; + case OpTypeAccelerationStructureKHR: return "OpTypeAccelerationStructureKHR"; + case OpExecuteCallableKHR: return "OpExecuteCallableKHR"; case OpImageSampleFootprintNV: return "OpImageSampleFootprintNV"; case OpWritePackedPrimitiveIndices4x8NV: return "OpWritePackedPrimitiveIndices4x8NV"; + case OpTypeRayQueryProvisionalKHR: return "OpTypeRayQueryProvisionalKHR"; + case OpRayQueryInitializeKHR: return "OpRayQueryInitializeKHR"; + case OpRayQueryTerminateKHR: return "OpRayQueryTerminateKHR"; + case OpRayQueryGenerateIntersectionKHR: return "OpRayQueryGenerateIntersectionKHR"; + case OpRayQueryConfirmIntersectionKHR: return "OpRayQueryConfirmIntersectionKHR"; + case OpRayQueryProceedKHR: return "OpRayQueryProceedKHR"; + case OpRayQueryGetIntersectionTypeKHR: return "OpRayQueryGetIntersectionTypeKHR"; + case OpRayQueryGetRayTMinKHR: return "OpRayQueryGetRayTMinKHR"; + case OpRayQueryGetRayFlagsKHR: return "OpRayQueryGetRayFlagsKHR"; + case OpRayQueryGetIntersectionTKHR: return "OpRayQueryGetIntersectionTKHR"; + case OpRayQueryGetIntersectionInstanceCustomIndexKHR: return "OpRayQueryGetIntersectionInstanceCustomIndexKHR"; + case OpRayQueryGetIntersectionInstanceIdKHR: return "OpRayQueryGetIntersectionInstanceIdKHR"; + case OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR: return "OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR"; + case OpRayQueryGetIntersectionGeometryIndexKHR: return "OpRayQueryGetIntersectionGeometryIndexKHR"; + case OpRayQueryGetIntersectionPrimitiveIndexKHR: return "OpRayQueryGetIntersectionPrimitiveIndexKHR"; + case OpRayQueryGetIntersectionBarycentricsKHR: return "OpRayQueryGetIntersectionBarycentricsKHR"; + case OpRayQueryGetIntersectionFrontFaceKHR: return "OpRayQueryGetIntersectionFrontFaceKHR"; + case OpRayQueryGetIntersectionCandidateAABBOpaqueKHR: return "OpRayQueryGetIntersectionCandidateAABBOpaqueKHR"; + case OpRayQueryGetIntersectionObjectRayDirectionKHR: return "OpRayQueryGetIntersectionObjectRayDirectionKHR"; + case OpRayQueryGetIntersectionObjectRayOriginKHR: return "OpRayQueryGetIntersectionObjectRayOriginKHR"; + case OpRayQueryGetWorldRayDirectionKHR: return "OpRayQueryGetWorldRayDirectionKHR"; + case OpRayQueryGetWorldRayOriginKHR: return "OpRayQueryGetWorldRayOriginKHR"; + case OpRayQueryGetIntersectionObjectToWorldKHR: return "OpRayQueryGetIntersectionObjectToWorldKHR"; + case OpRayQueryGetIntersectionWorldToObjectKHR: return "OpRayQueryGetIntersectionWorldToObjectKHR"; + case OpTypeCooperativeMatrixNV: return "OpTypeCooperativeMatrixNV"; case OpCooperativeMatrixLoadNV: return "OpCooperativeMatrixLoadNV"; case OpCooperativeMatrixStoreNV: return "OpCooperativeMatrixStoreNV"; @@ -1388,6 +1443,7 @@ void Parameterize() InstructionDesc[OpMemoryModel].setResultAndType(false, false); InstructionDesc[OpEntryPoint].setResultAndType(false, false); InstructionDesc[OpExecutionMode].setResultAndType(false, false); + 
InstructionDesc[OpExecutionModeId].setResultAndType(false, false); InstructionDesc[OpTypeVoid].setResultAndType(true, false); InstructionDesc[OpTypeBool].setResultAndType(true, false); InstructionDesc[OpTypeInt].setResultAndType(true, false); @@ -1574,6 +1630,10 @@ void Parameterize() InstructionDesc[OpExecutionMode].operands.push(OperandExecutionMode, "'Mode'"); InstructionDesc[OpExecutionMode].operands.push(OperandOptionalLiteral, "See <>"); + InstructionDesc[OpExecutionModeId].operands.push(OperandId, "'Entry Point'"); + InstructionDesc[OpExecutionModeId].operands.push(OperandExecutionMode, "'Mode'"); + InstructionDesc[OpExecutionModeId].operands.push(OperandVariableIds, "See <>"); + InstructionDesc[OpTypeInt].operands.push(OperandLiteralNumber, "'Width'"); InstructionDesc[OpTypeInt].operands.push(OperandLiteralNumber, "'Signedness'"); @@ -1667,7 +1727,7 @@ void Parameterize() InstructionDesc[OpDecorateStringGOOGLE].operands.push(OperandId, "'Target'"); InstructionDesc[OpDecorateStringGOOGLE].operands.push(OperandDecoration, ""); - InstructionDesc[OpDecorateStringGOOGLE].operands.push(OperandLiteralString, "'Literal String'"); + InstructionDesc[OpDecorateStringGOOGLE].operands.push(OperandVariableLiteralStrings, "'Literal Strings'"); InstructionDesc[OpMemberDecorate].operands.push(OperandId, "'Structure Type'"); InstructionDesc[OpMemberDecorate].operands.push(OperandLiteralNumber, "'Member'"); @@ -1677,7 +1737,7 @@ void Parameterize() InstructionDesc[OpMemberDecorateStringGOOGLE].operands.push(OperandId, "'Structure Type'"); InstructionDesc[OpMemberDecorateStringGOOGLE].operands.push(OperandLiteralNumber, "'Member'"); InstructionDesc[OpMemberDecorateStringGOOGLE].operands.push(OperandDecoration, ""); - InstructionDesc[OpMemberDecorateStringGOOGLE].operands.push(OperandLiteralString, "'Literal String'"); + InstructionDesc[OpMemberDecorateStringGOOGLE].operands.push(OperandVariableLiteralStrings, "'Literal Strings'"); InstructionDesc[OpGroupDecorate].operands.push(OperandId, "'Decoration Group'"); InstructionDesc[OpGroupDecorate].operands.push(OperandVariableIds, "'Targets'"); @@ -2230,6 +2290,11 @@ void Parameterize() InstructionDesc[OpAtomicIAdd].operands.push(OperandMemorySemantics, "'Semantics'"); InstructionDesc[OpAtomicIAdd].operands.push(OperandId, "'Value'"); + InstructionDesc[OpAtomicFAddEXT].operands.push(OperandId, "'Pointer'"); + InstructionDesc[OpAtomicFAddEXT].operands.push(OperandScope, "'Scope'"); + InstructionDesc[OpAtomicFAddEXT].operands.push(OperandMemorySemantics, "'Semantics'"); + InstructionDesc[OpAtomicFAddEXT].operands.push(OperandId, "'Value'"); + InstructionDesc[OpAtomicISub].operands.push(OperandId, "'Pointer'"); InstructionDesc[OpAtomicISub].operands.push(OperandScope, "'Scope'"); InstructionDesc[OpAtomicISub].operands.push(OperandMemorySemantics, "'Semantics'"); @@ -2694,31 +2759,125 @@ void Parameterize() InstructionDesc[OpGroupNonUniformPartitionNV].operands.push(OperandId, "X"); - InstructionDesc[OpTypeAccelerationStructureNV].setResultAndType(true, false); + InstructionDesc[OpTypeAccelerationStructureKHR].setResultAndType(true, false); - InstructionDesc[OpTraceNV].operands.push(OperandId, "'NV Acceleration Structure'"); - InstructionDesc[OpTraceNV].operands.push(OperandId, "'Ray Flags'"); - InstructionDesc[OpTraceNV].operands.push(OperandId, "'Cull Mask'"); - InstructionDesc[OpTraceNV].operands.push(OperandId, "'SBT Record Offset'"); - InstructionDesc[OpTraceNV].operands.push(OperandId, "'SBT Record Stride'"); - 
InstructionDesc[OpTraceNV].operands.push(OperandId, "'Miss Index'"); - InstructionDesc[OpTraceNV].operands.push(OperandId, "'Ray Origin'"); - InstructionDesc[OpTraceNV].operands.push(OperandId, "'TMin'"); - InstructionDesc[OpTraceNV].operands.push(OperandId, "'Ray Direction'"); - InstructionDesc[OpTraceNV].operands.push(OperandId, "'TMax'"); - InstructionDesc[OpTraceNV].operands.push(OperandId, "'Payload'"); - InstructionDesc[OpTraceNV].setResultAndType(false, false); + InstructionDesc[OpTraceRayKHR].operands.push(OperandId, "'NV Acceleration Structure'"); + InstructionDesc[OpTraceRayKHR].operands.push(OperandId, "'Ray Flags'"); + InstructionDesc[OpTraceRayKHR].operands.push(OperandId, "'Cull Mask'"); + InstructionDesc[OpTraceRayKHR].operands.push(OperandId, "'SBT Record Offset'"); + InstructionDesc[OpTraceRayKHR].operands.push(OperandId, "'SBT Record Stride'"); + InstructionDesc[OpTraceRayKHR].operands.push(OperandId, "'Miss Index'"); + InstructionDesc[OpTraceRayKHR].operands.push(OperandId, "'Ray Origin'"); + InstructionDesc[OpTraceRayKHR].operands.push(OperandId, "'TMin'"); + InstructionDesc[OpTraceRayKHR].operands.push(OperandId, "'Ray Direction'"); + InstructionDesc[OpTraceRayKHR].operands.push(OperandId, "'TMax'"); + InstructionDesc[OpTraceRayKHR].operands.push(OperandId, "'Payload'"); + InstructionDesc[OpTraceRayKHR].setResultAndType(false, false); - InstructionDesc[OpReportIntersectionNV].operands.push(OperandId, "'Hit Parameter'"); - InstructionDesc[OpReportIntersectionNV].operands.push(OperandId, "'Hit Kind'"); + InstructionDesc[OpReportIntersectionKHR].operands.push(OperandId, "'Hit Parameter'"); + InstructionDesc[OpReportIntersectionKHR].operands.push(OperandId, "'Hit Kind'"); - InstructionDesc[OpIgnoreIntersectionNV].setResultAndType(false, false); + InstructionDesc[OpIgnoreIntersectionKHR].setResultAndType(false, false); - InstructionDesc[OpTerminateRayNV].setResultAndType(false, false); + InstructionDesc[OpTerminateRayKHR].setResultAndType(false, false); - InstructionDesc[OpExecuteCallableNV].operands.push(OperandId, "SBT Record Index"); - InstructionDesc[OpExecuteCallableNV].operands.push(OperandId, "CallableData ID"); - InstructionDesc[OpExecuteCallableNV].setResultAndType(false, false); + InstructionDesc[OpExecuteCallableKHR].operands.push(OperandId, "SBT Record Index"); + InstructionDesc[OpExecuteCallableKHR].operands.push(OperandId, "CallableData ID"); + InstructionDesc[OpExecuteCallableKHR].setResultAndType(false, false); + + // Ray Query + InstructionDesc[OpTypeAccelerationStructureKHR].setResultAndType(true, false); + InstructionDesc[OpTypeRayQueryProvisionalKHR].setResultAndType(true, false); + + InstructionDesc[OpRayQueryInitializeKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryInitializeKHR].operands.push(OperandId, "'AccelerationS'"); + InstructionDesc[OpRayQueryInitializeKHR].operands.push(OperandId, "'RayFlags'"); + InstructionDesc[OpRayQueryInitializeKHR].operands.push(OperandId, "'CullMask'"); + InstructionDesc[OpRayQueryInitializeKHR].operands.push(OperandId, "'Origin'"); + InstructionDesc[OpRayQueryInitializeKHR].operands.push(OperandId, "'Tmin'"); + InstructionDesc[OpRayQueryInitializeKHR].operands.push(OperandId, "'Direction'"); + InstructionDesc[OpRayQueryInitializeKHR].operands.push(OperandId, "'Tmax'"); + InstructionDesc[OpRayQueryInitializeKHR].setResultAndType(false, false); + + InstructionDesc[OpRayQueryTerminateKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryTerminateKHR].setResultAndType(false, 
false); + + InstructionDesc[OpRayQueryGenerateIntersectionKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGenerateIntersectionKHR].operands.push(OperandId, "'THit'"); + InstructionDesc[OpRayQueryGenerateIntersectionKHR].setResultAndType(false, false); + + InstructionDesc[OpRayQueryConfirmIntersectionKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryConfirmIntersectionKHR].setResultAndType(false, false); + + InstructionDesc[OpRayQueryProceedKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryProceedKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionTypeKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionTypeKHR].operands.push(OperandId, "'Committed'"); + InstructionDesc[OpRayQueryGetIntersectionTypeKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetRayTMinKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetRayTMinKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetRayFlagsKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetRayFlagsKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionTKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionTKHR].operands.push(OperandId, "'Committed'"); + InstructionDesc[OpRayQueryGetIntersectionTKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionInstanceCustomIndexKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionInstanceCustomIndexKHR].operands.push(OperandId, "'Committed'"); + InstructionDesc[OpRayQueryGetIntersectionInstanceCustomIndexKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionInstanceIdKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionInstanceIdKHR].operands.push(OperandId, "'Committed'"); + InstructionDesc[OpRayQueryGetIntersectionInstanceIdKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR].operands.push(OperandId, "'Committed'"); + InstructionDesc[OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionGeometryIndexKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionGeometryIndexKHR].operands.push(OperandId, "'Committed'"); + InstructionDesc[OpRayQueryGetIntersectionGeometryIndexKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionPrimitiveIndexKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionPrimitiveIndexKHR].operands.push(OperandId, "'Committed'"); + InstructionDesc[OpRayQueryGetIntersectionPrimitiveIndexKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionBarycentricsKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionBarycentricsKHR].operands.push(OperandId, "'Committed'"); + InstructionDesc[OpRayQueryGetIntersectionBarycentricsKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionFrontFaceKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionFrontFaceKHR].operands.push(OperandId, "'Committed'"); + 
InstructionDesc[OpRayQueryGetIntersectionFrontFaceKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionCandidateAABBOpaqueKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionCandidateAABBOpaqueKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionObjectRayDirectionKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionObjectRayDirectionKHR].operands.push(OperandId, "'Committed'"); + InstructionDesc[OpRayQueryGetIntersectionObjectRayDirectionKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionObjectRayOriginKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionObjectRayOriginKHR].operands.push(OperandId, "'Committed'"); + InstructionDesc[OpRayQueryGetIntersectionObjectRayOriginKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetWorldRayDirectionKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetWorldRayDirectionKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetWorldRayOriginKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetWorldRayOriginKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionObjectToWorldKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionObjectToWorldKHR].operands.push(OperandId, "'Committed'"); + InstructionDesc[OpRayQueryGetIntersectionObjectToWorldKHR].setResultAndType(true, true); + + InstructionDesc[OpRayQueryGetIntersectionWorldToObjectKHR].operands.push(OperandId, "'RayQuery'"); + InstructionDesc[OpRayQueryGetIntersectionWorldToObjectKHR].operands.push(OperandId, "'Committed'"); + InstructionDesc[OpRayQueryGetIntersectionWorldToObjectKHR].setResultAndType(true, true); InstructionDesc[OpImageSampleFootprintNV].operands.push(OperandId, "'Sampled Image'"); InstructionDesc[OpImageSampleFootprintNV].operands.push(OperandId, "'Coordinate'"); diff --git a/thirdparty/glslang/SPIRV/doc.h b/thirdparty/glslang/SPIRV/doc.h index 293256a2c68..2a0b28c6b3a 100644 --- a/thirdparty/glslang/SPIRV/doc.h +++ b/thirdparty/glslang/SPIRV/doc.h @@ -125,6 +125,7 @@ enum OperandClass { OperandVariableLiteralId, OperandLiteralNumber, OperandLiteralString, + OperandVariableLiteralStrings, OperandSource, OperandExecutionModel, OperandAddressing, diff --git a/thirdparty/glslang/SPIRV/hex_float.h b/thirdparty/glslang/SPIRV/hex_float.h index 905b21a45ad..8be8e9f7e36 100644 --- a/thirdparty/glslang/SPIRV/hex_float.h +++ b/thirdparty/glslang/SPIRV/hex_float.h @@ -784,8 +784,8 @@ inline std::istream& ParseNormalFloat(std::istream& is, bool negate_value, if (val.isInfinity()) { // Fail the parse. Emulate standard behaviour by setting the value to // the closest normal value, and set the fail bit on the stream. - value.set_value((value.isNegative() | negate_value) ? T::lowest() - : T::max()); + value.set_value((value.isNegative() || negate_value) ? T::lowest() + : T::max()); is.setstate(std::ios_base::failbit); } return is; diff --git a/thirdparty/glslang/SPIRV/spirv.hpp b/thirdparty/glslang/SPIRV/spirv.hpp index 1e96f7b4a96..35482ea5e29 100644 --- a/thirdparty/glslang/SPIRV/spirv.hpp +++ b/thirdparty/glslang/SPIRV/spirv.hpp @@ -1,4 +1,4 @@ -// Copyright (c) 2014-2019 The Khronos Group Inc. +// Copyright (c) 2014-2020 The Khronos Group Inc. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and/or associated documentation files (the "Materials"), @@ -49,12 +49,12 @@ namespace spv { typedef unsigned int Id; -#define SPV_VERSION 0x10400 -#define SPV_REVISION 1 +#define SPV_VERSION 0x10500 +#define SPV_REVISION 3 static const unsigned int MagicNumber = 0x07230203; -static const unsigned int Version = 0x00010400; -static const unsigned int Revision = 1; +static const unsigned int Version = 0x00010500; +static const unsigned int Revision = 3; static const unsigned int OpCodeMask = 0xffff; static const unsigned int WordCountShift = 16; @@ -78,11 +78,17 @@ enum ExecutionModel { ExecutionModelKernel = 6, ExecutionModelTaskNV = 5267, ExecutionModelMeshNV = 5268, + ExecutionModelRayGenerationKHR = 5313, ExecutionModelRayGenerationNV = 5313, + ExecutionModelIntersectionKHR = 5314, ExecutionModelIntersectionNV = 5314, + ExecutionModelAnyHitKHR = 5315, ExecutionModelAnyHitNV = 5315, + ExecutionModelClosestHitKHR = 5316, ExecutionModelClosestHitNV = 5316, + ExecutionModelMissKHR = 5317, ExecutionModelMissNV = 5317, + ExecutionModelCallableKHR = 5318, ExecutionModelCallableNV = 5318, ExecutionModelMax = 0x7fffffff, }; @@ -162,6 +168,10 @@ enum ExecutionMode { ExecutionModeSampleInterlockUnorderedEXT = 5369, ExecutionModeShadingRateInterlockOrderedEXT = 5370, ExecutionModeShadingRateInterlockUnorderedEXT = 5371, + ExecutionModeMaxWorkgroupSizeINTEL = 5893, + ExecutionModeMaxWorkDimINTEL = 5894, + ExecutionModeNoGlobalOffsetINTEL = 5895, + ExecutionModeNumSIMDWorkitemsINTEL = 5896, ExecutionModeMax = 0x7fffffff, }; @@ -179,14 +189,21 @@ enum StorageClass { StorageClassAtomicCounter = 10, StorageClassImage = 11, StorageClassStorageBuffer = 12, + StorageClassCallableDataKHR = 5328, StorageClassCallableDataNV = 5328, + StorageClassIncomingCallableDataKHR = 5329, StorageClassIncomingCallableDataNV = 5329, + StorageClassRayPayloadKHR = 5338, StorageClassRayPayloadNV = 5338, + StorageClassHitAttributeKHR = 5339, StorageClassHitAttributeNV = 5339, + StorageClassIncomingRayPayloadKHR = 5342, StorageClassIncomingRayPayloadNV = 5342, + StorageClassShaderRecordBufferKHR = 5343, StorageClassShaderRecordBufferNV = 5343, StorageClassPhysicalStorageBuffer = 5349, StorageClassPhysicalStorageBufferEXT = 5349, + StorageClassCodeSectionINTEL = 5605, StorageClassMax = 0x7fffffff, }; @@ -465,11 +482,24 @@ enum Decoration { DecorationRestrictPointerEXT = 5355, DecorationAliasedPointer = 5356, DecorationAliasedPointerEXT = 5356, + DecorationReferencedIndirectlyINTEL = 5602, DecorationCounterBuffer = 5634, DecorationHlslCounterBufferGOOGLE = 5634, DecorationHlslSemanticGOOGLE = 5635, DecorationUserSemantic = 5635, DecorationUserTypeGOOGLE = 5636, + DecorationRegisterINTEL = 5825, + DecorationMemoryINTEL = 5826, + DecorationNumbanksINTEL = 5827, + DecorationBankwidthINTEL = 5828, + DecorationMaxPrivateCopiesINTEL = 5829, + DecorationSinglepumpINTEL = 5830, + DecorationDoublepumpINTEL = 5831, + DecorationMaxReplicatesINTEL = 5832, + DecorationSimpleDualPortINTEL = 5833, + DecorationMergeINTEL = 5834, + DecorationBankBitsINTEL = 5835, + DecorationForcePow2DepthINTEL = 5836, DecorationMax = 0x7fffffff, }; @@ -558,20 +588,35 @@ enum BuiltIn { BuiltInFragmentSizeNV = 5292, BuiltInFragInvocationCountEXT = 5293, BuiltInInvocationsPerPixelNV = 5293, + BuiltInLaunchIdKHR = 5319, BuiltInLaunchIdNV = 5319, + BuiltInLaunchSizeKHR = 5320, BuiltInLaunchSizeNV = 5320, + BuiltInWorldRayOriginKHR = 5321, BuiltInWorldRayOriginNV = 
5321, + BuiltInWorldRayDirectionKHR = 5322, BuiltInWorldRayDirectionNV = 5322, + BuiltInObjectRayOriginKHR = 5323, BuiltInObjectRayOriginNV = 5323, + BuiltInObjectRayDirectionKHR = 5324, BuiltInObjectRayDirectionNV = 5324, + BuiltInRayTminKHR = 5325, BuiltInRayTminNV = 5325, + BuiltInRayTmaxKHR = 5326, BuiltInRayTmaxNV = 5326, + BuiltInInstanceCustomIndexKHR = 5327, BuiltInInstanceCustomIndexNV = 5327, + BuiltInObjectToWorldKHR = 5330, BuiltInObjectToWorldNV = 5330, + BuiltInWorldToObjectKHR = 5331, BuiltInWorldToObjectNV = 5331, + BuiltInHitTKHR = 5332, BuiltInHitTNV = 5332, + BuiltInHitKindKHR = 5333, BuiltInHitKindNV = 5333, + BuiltInIncomingRayFlagsKHR = 5351, BuiltInIncomingRayFlagsNV = 5351, + BuiltInRayGeometryIndexKHR = 5352, BuiltInWarpsPerSMNV = 5374, BuiltInSMCountNV = 5375, BuiltInWarpIDNV = 5376, @@ -601,6 +646,13 @@ enum LoopControlShift { LoopControlIterationMultipleShift = 6, LoopControlPeelCountShift = 7, LoopControlPartialCountShift = 8, + LoopControlInitiationIntervalINTELShift = 16, + LoopControlMaxConcurrencyINTELShift = 17, + LoopControlDependencyArrayINTELShift = 18, + LoopControlPipelineEnableINTELShift = 19, + LoopControlLoopCoalesceINTELShift = 20, + LoopControlMaxInterleavingINTELShift = 21, + LoopControlSpeculatedIterationsINTELShift = 22, LoopControlMax = 0x7fffffff, }; @@ -615,6 +667,13 @@ enum LoopControlMask { LoopControlIterationMultipleMask = 0x00000040, LoopControlPeelCountMask = 0x00000080, LoopControlPartialCountMask = 0x00000100, + LoopControlInitiationIntervalINTELMask = 0x00010000, + LoopControlMaxConcurrencyINTELMask = 0x00020000, + LoopControlDependencyArrayINTELMask = 0x00040000, + LoopControlPipelineEnableINTELMask = 0x00080000, + LoopControlLoopCoalesceINTELMask = 0x00100000, + LoopControlMaxInterleavingINTELMask = 0x00200000, + LoopControlSpeculatedIterationsINTELMask = 0x00400000, }; enum FunctionControlShift { @@ -709,6 +768,7 @@ enum Scope { ScopeInvocation = 4, ScopeQueueFamily = 5, ScopeQueueFamilyKHR = 5, + ScopeShaderCallKHR = 6, ScopeMax = 0x7fffffff, }; @@ -833,6 +893,8 @@ enum Capability { CapabilitySignedZeroInfNanPreserve = 4466, CapabilityRoundingModeRTE = 4467, CapabilityRoundingModeRTZ = 4468, + CapabilityRayQueryProvisionalKHR = 4471, + CapabilityRayTraversalPrimitiveCullingProvisionalKHR = 4478, CapabilityFloat16ImageAMD = 5008, CapabilityImageGatherBiasLodAMD = 5009, CapabilityFragmentMaskAMD = 5010, @@ -886,6 +948,7 @@ enum Capability { CapabilityPhysicalStorageBufferAddresses = 5347, CapabilityPhysicalStorageBufferAddressesEXT = 5347, CapabilityComputeDerivativeGroupLinearNV = 5350, + CapabilityRayTracingProvisionalKHR = 5353, CapabilityCooperativeMatrixNV = 5357, CapabilityFragmentShaderSampleInterlockEXT = 5363, CapabilityFragmentShaderShadingRateInterlockEXT = 5372, @@ -897,12 +960,70 @@ enum Capability { CapabilitySubgroupImageBlockIOINTEL = 5570, CapabilitySubgroupImageMediaBlockIOINTEL = 5579, CapabilityIntegerFunctions2INTEL = 5584, + CapabilityFunctionPointersINTEL = 5603, + CapabilityIndirectReferencesINTEL = 5604, CapabilitySubgroupAvcMotionEstimationINTEL = 5696, CapabilitySubgroupAvcMotionEstimationIntraINTEL = 5697, CapabilitySubgroupAvcMotionEstimationChromaINTEL = 5698, + CapabilityFPGAMemoryAttributesINTEL = 5824, + CapabilityUnstructuredLoopControlsINTEL = 5886, + CapabilityFPGALoopControlsINTEL = 5888, + CapabilityKernelAttributesINTEL = 5892, + CapabilityFPGAKernelAttributesINTEL = 5897, + CapabilityBlockingPipesINTEL = 5945, + CapabilityFPGARegINTEL = 5948, + CapabilityAtomicFloat32AddEXT = 6033, + 
CapabilityAtomicFloat64AddEXT = 6034, CapabilityMax = 0x7fffffff, }; +enum RayFlagsShift { + RayFlagsOpaqueKHRShift = 0, + RayFlagsNoOpaqueKHRShift = 1, + RayFlagsTerminateOnFirstHitKHRShift = 2, + RayFlagsSkipClosestHitShaderKHRShift = 3, + RayFlagsCullBackFacingTrianglesKHRShift = 4, + RayFlagsCullFrontFacingTrianglesKHRShift = 5, + RayFlagsCullOpaqueKHRShift = 6, + RayFlagsCullNoOpaqueKHRShift = 7, + RayFlagsSkipTrianglesKHRShift = 8, + RayFlagsSkipAABBsKHRShift = 9, + RayFlagsMax = 0x7fffffff, +}; + +enum RayFlagsMask { + RayFlagsMaskNone = 0, + RayFlagsOpaqueKHRMask = 0x00000001, + RayFlagsNoOpaqueKHRMask = 0x00000002, + RayFlagsTerminateOnFirstHitKHRMask = 0x00000004, + RayFlagsSkipClosestHitShaderKHRMask = 0x00000008, + RayFlagsCullBackFacingTrianglesKHRMask = 0x00000010, + RayFlagsCullFrontFacingTrianglesKHRMask = 0x00000020, + RayFlagsCullOpaqueKHRMask = 0x00000040, + RayFlagsCullNoOpaqueKHRMask = 0x00000080, + RayFlagsSkipTrianglesKHRMask = 0x00000100, + RayFlagsSkipAABBsKHRMask = 0x00000200, +}; + +enum RayQueryIntersection { + RayQueryIntersectionRayQueryCandidateIntersectionKHR = 0, + RayQueryIntersectionRayQueryCommittedIntersectionKHR = 1, + RayQueryIntersectionMax = 0x7fffffff, +}; + +enum RayQueryCommittedIntersectionType { + RayQueryCommittedIntersectionTypeRayQueryCommittedIntersectionNoneKHR = 0, + RayQueryCommittedIntersectionTypeRayQueryCommittedIntersectionTriangleKHR = 1, + RayQueryCommittedIntersectionTypeRayQueryCommittedIntersectionGeneratedKHR = 2, + RayQueryCommittedIntersectionTypeMax = 0x7fffffff, +}; + +enum RayQueryCandidateIntersectionType { + RayQueryCandidateIntersectionTypeRayQueryCandidateIntersectionTriangleKHR = 0, + RayQueryCandidateIntersectionTypeRayQueryCandidateIntersectionAABBKHR = 1, + RayQueryCandidateIntersectionTypeMax = 0x7fffffff, +}; + enum Op { OpNop = 0, OpUndef = 1, @@ -1254,6 +1375,13 @@ enum Op { OpSubgroupAnyKHR = 4429, OpSubgroupAllEqualKHR = 4430, OpSubgroupReadInvocationKHR = 4432, + OpTypeRayQueryProvisionalKHR = 4472, + OpRayQueryInitializeKHR = 4473, + OpRayQueryTerminateKHR = 4474, + OpRayQueryGenerateIntersectionKHR = 4475, + OpRayQueryConfirmIntersectionKHR = 4476, + OpRayQueryProceedKHR = 4477, + OpRayQueryGetIntersectionTypeKHR = 4479, OpGroupIAddNonUniformAMD = 5000, OpGroupFAddNonUniformAMD = 5001, OpGroupFMinNonUniformAMD = 5002, @@ -1268,11 +1396,17 @@ enum Op { OpImageSampleFootprintNV = 5283, OpGroupNonUniformPartitionNV = 5296, OpWritePackedPrimitiveIndices4x8NV = 5299, + OpReportIntersectionKHR = 5334, OpReportIntersectionNV = 5334, + OpIgnoreIntersectionKHR = 5335, OpIgnoreIntersectionNV = 5335, + OpTerminateRayKHR = 5336, OpTerminateRayNV = 5336, OpTraceNV = 5337, + OpTraceRayKHR = 5337, + OpTypeAccelerationStructureKHR = 5341, OpTypeAccelerationStructureNV = 5341, + OpExecuteCallableKHR = 5344, OpExecuteCallableNV = 5344, OpTypeCooperativeMatrixNV = 5358, OpCooperativeMatrixLoadNV = 5359, @@ -1307,6 +1441,8 @@ enum Op { OpUSubSatINTEL = 5596, OpIMul32x16INTEL = 5597, OpUMul32x16INTEL = 5598, + OpFunctionPointerINTEL = 5600, + OpFunctionPointerCallINTEL = 5601, OpDecorateString = 5632, OpDecorateStringGOOGLE = 5632, OpMemberDecorateString = 5633, @@ -1429,6 +1565,28 @@ enum Op { OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL = 5814, OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL = 5815, OpSubgroupAvcSicGetInterRawSadsINTEL = 5816, + OpLoopControlINTEL = 5887, + OpReadPipeBlockingINTEL = 5946, + OpWritePipeBlockingINTEL = 5947, + OpFPGARegINTEL = 5949, + OpRayQueryGetRayTMinKHR = 6016, + 
OpRayQueryGetRayFlagsKHR = 6017, + OpRayQueryGetIntersectionTKHR = 6018, + OpRayQueryGetIntersectionInstanceCustomIndexKHR = 6019, + OpRayQueryGetIntersectionInstanceIdKHR = 6020, + OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR = 6021, + OpRayQueryGetIntersectionGeometryIndexKHR = 6022, + OpRayQueryGetIntersectionPrimitiveIndexKHR = 6023, + OpRayQueryGetIntersectionBarycentricsKHR = 6024, + OpRayQueryGetIntersectionFrontFaceKHR = 6025, + OpRayQueryGetIntersectionCandidateAABBOpaqueKHR = 6026, + OpRayQueryGetIntersectionObjectRayDirectionKHR = 6027, + OpRayQueryGetIntersectionObjectRayOriginKHR = 6028, + OpRayQueryGetWorldRayDirectionKHR = 6029, + OpRayQueryGetWorldRayOriginKHR = 6030, + OpRayQueryGetIntersectionObjectToWorldKHR = 6031, + OpRayQueryGetIntersectionWorldToObjectKHR = 6032, + OpAtomicFAddEXT = 6035, OpMax = 0x7fffffff, }; @@ -1787,6 +1945,13 @@ inline void HasResultAndType(Op opcode, bool *hasResult, bool *hasResultType) { case OpSubgroupAnyKHR: *hasResult = true; *hasResultType = true; break; case OpSubgroupAllEqualKHR: *hasResult = true; *hasResultType = true; break; case OpSubgroupReadInvocationKHR: *hasResult = true; *hasResultType = true; break; + case OpTypeRayQueryProvisionalKHR: *hasResult = true; *hasResultType = false; break; + case OpRayQueryInitializeKHR: *hasResult = false; *hasResultType = false; break; + case OpRayQueryTerminateKHR: *hasResult = false; *hasResultType = false; break; + case OpRayQueryGenerateIntersectionKHR: *hasResult = false; *hasResultType = false; break; + case OpRayQueryConfirmIntersectionKHR: *hasResult = false; *hasResultType = false; break; + case OpRayQueryProceedKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionTypeKHR: *hasResult = true; *hasResultType = true; break; case OpGroupIAddNonUniformAMD: *hasResult = true; *hasResultType = true; break; case OpGroupFAddNonUniformAMD: *hasResult = true; *hasResultType = true; break; case OpGroupFMinNonUniformAMD: *hasResult = true; *hasResultType = true; break; @@ -1840,6 +2005,8 @@ inline void HasResultAndType(Op opcode, bool *hasResult, bool *hasResultType) { case OpUSubSatINTEL: *hasResult = true; *hasResultType = true; break; case OpIMul32x16INTEL: *hasResult = true; *hasResultType = true; break; case OpUMul32x16INTEL: *hasResult = true; *hasResultType = true; break; + case OpFunctionPointerINTEL: *hasResult = true; *hasResultType = true; break; + case OpFunctionPointerCallINTEL: *hasResult = true; *hasResultType = true; break; case OpDecorateString: *hasResult = false; *hasResultType = false; break; case OpMemberDecorateString: *hasResult = false; *hasResultType = false; break; case OpVmeImageINTEL: *hasResult = true; *hasResultType = true; break; @@ -1960,6 +2127,28 @@ inline void HasResultAndType(Op opcode, bool *hasResult, bool *hasResultType) { case OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL: *hasResult = true; *hasResultType = true; break; case OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL: *hasResult = true; *hasResultType = true; break; case OpSubgroupAvcSicGetInterRawSadsINTEL: *hasResult = true; *hasResultType = true; break; + case OpLoopControlINTEL: *hasResult = false; *hasResultType = false; break; + case OpReadPipeBlockingINTEL: *hasResult = true; *hasResultType = true; break; + case OpWritePipeBlockingINTEL: *hasResult = true; *hasResultType = true; break; + case OpFPGARegINTEL: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetRayTMinKHR: *hasResult = true; *hasResultType = true; break; 
+ case OpRayQueryGetRayFlagsKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionTKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionInstanceCustomIndexKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionInstanceIdKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionGeometryIndexKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionPrimitiveIndexKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionBarycentricsKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionFrontFaceKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionCandidateAABBOpaqueKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionObjectRayDirectionKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionObjectRayOriginKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetWorldRayDirectionKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetWorldRayOriginKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionObjectToWorldKHR: *hasResult = true; *hasResultType = true; break; + case OpRayQueryGetIntersectionWorldToObjectKHR: *hasResult = true; *hasResultType = true; break; + case OpAtomicFAddEXT: *hasResult = true; *hasResultType = true; break; } } #endif /* SPV_ENABLE_UTILITY_CODE */ @@ -1974,6 +2163,7 @@ inline FunctionControlMask operator|(FunctionControlMask a, FunctionControlMask inline MemorySemanticsMask operator|(MemorySemanticsMask a, MemorySemanticsMask b) { return MemorySemanticsMask(unsigned(a) | unsigned(b)); } inline MemoryAccessMask operator|(MemoryAccessMask a, MemoryAccessMask b) { return MemoryAccessMask(unsigned(a) | unsigned(b)); } inline KernelProfilingInfoMask operator|(KernelProfilingInfoMask a, KernelProfilingInfoMask b) { return KernelProfilingInfoMask(unsigned(a) | unsigned(b)); } +inline RayFlagsMask operator|(RayFlagsMask a, RayFlagsMask b) { return RayFlagsMask(unsigned(a) | unsigned(b)); } } // end namespace spv diff --git a/thirdparty/glslang/SPIRV/spvIR.h b/thirdparty/glslang/SPIRV/spvIR.h old mode 100755 new mode 100644 index cf6a71159af..868b9bf82d4 --- a/thirdparty/glslang/SPIRV/spvIR.h +++ b/thirdparty/glslang/SPIRV/spvIR.h @@ -55,6 +55,7 @@ #include #include #include +#include namespace spv { @@ -235,8 +236,7 @@ public: assert(instructions.size() > 0); instructions.resize(1); successors.clear(); - Instruction* unreachable = new Instruction(OpUnreachable); - addInstruction(std::unique_ptr(unreachable)); + addInstruction(std::unique_ptr(new Instruction(OpUnreachable))); } // Change this block into a canonical dead continue target branching to the // given header ID. Delete instructions as necessary. A canonical dead continue @@ -352,10 +352,28 @@ public: const std::vector& getBlocks() const { return blocks; } void addLocalVariable(std::unique_ptr inst); Id getReturnType() const { return functionInstruction.getTypeId(); } + void setReturnPrecision(Decoration precision) + { + if (precision == DecorationRelaxedPrecision) + reducedPrecisionReturn = true; + } + Decoration getReturnPrecision() const + { return reducedPrecisionReturn ? 
DecorationRelaxedPrecision : NoPrecision; } void setImplicitThis() { implicitThis = true; } bool hasImplicitThis() const { return implicitThis; } + void addParamPrecision(unsigned param, Decoration precision) + { + if (precision == DecorationRelaxedPrecision) + reducedPrecisionParams.insert(param); + } + Decoration getParamPrecision(unsigned param) const + { + return reducedPrecisionParams.find(param) != reducedPrecisionParams.end() ? + DecorationRelaxedPrecision : NoPrecision; + } + void dump(std::vector& out) const { // OpFunction @@ -380,6 +398,8 @@ protected: std::vector parameterInstructions; std::vector blocks; bool implicitThis; // true if this is a member function expecting to be passed a 'this' as the first argument + bool reducedPrecisionReturn; + std::set reducedPrecisionParams; // list of parameter indexes that need a relaxed precision arg }; // @@ -440,7 +460,8 @@ protected: // - the OpFunction instruction // - all the OpFunctionParameter instructions __inline Function::Function(Id id, Id resultType, Id functionType, Id firstParamId, Module& parent) - : parent(parent), functionInstruction(id, resultType, OpFunction), implicitThis(false) + : parent(parent), functionInstruction(id, resultType, OpFunction), implicitThis(false), + reducedPrecisionReturn(false) { // OpFunction functionInstruction.addImmediateOperand(FunctionControlMaskNone); diff --git a/thirdparty/glslang/StandAlone/DirStackFileIncluder.h b/thirdparty/glslang/StandAlone/DirStackFileIncluder.h new file mode 100644 index 00000000000..18734130e71 --- /dev/null +++ b/thirdparty/glslang/StandAlone/DirStackFileIncluder.h @@ -0,0 +1,141 @@ +// +// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. +// Copyright (C) 2017 Google, Inc. +// +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// +// Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// +// Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// +// Neither the name of 3Dlabs Inc. Ltd. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +// POSSIBILITY OF SUCH DAMAGE. +// + +#pragma once + +#include +#include +#include +#include + +#include "./../glslang/Public/ShaderLang.h" + +// Default include class for normal include convention of search backward +// through the stack of active include paths (for nested includes). 
+// Can be overridden to customize.
+class DirStackFileIncluder : public glslang::TShader::Includer {
+public:
+    DirStackFileIncluder() : externalLocalDirectoryCount(0) { }
+
+    virtual IncludeResult* includeLocal(const char* headerName,
+                                        const char* includerName,
+                                        size_t inclusionDepth) override
+    {
+        return readLocalPath(headerName, includerName, (int)inclusionDepth);
+    }
+
+    virtual IncludeResult* includeSystem(const char* headerName,
+                                         const char* /*includerName*/,
+                                         size_t /*inclusionDepth*/) override
+    {
+        return readSystemPath(headerName);
+    }
+
+    // Externally set directories. E.g., from a command-line -I<dir>.
+    //  - Most-recently pushed are checked first.
+    //  - All these are checked after the parse-time stack of local directories
+    //    is checked.
+    //  - This only applies to the "local" form of #include.
+    //  - Makes its own copy of the path.
+    virtual void pushExternalLocalDirectory(const std::string& dir)
+    {
+        directoryStack.push_back(dir);
+        externalLocalDirectoryCount = (int)directoryStack.size();
+    }
+
+    virtual void releaseInclude(IncludeResult* result) override
+    {
+        if (result != nullptr) {
+            delete [] static_cast<tUserDataElement*>(result->userData);
+            delete result;
+        }
+    }
+
+    virtual ~DirStackFileIncluder() override { }
+
+protected:
+    typedef char tUserDataElement;
+    std::vector<std::string> directoryStack;
+    int externalLocalDirectoryCount;
+
+    // Search for a valid "local" path based on combining the stack of include
+    // directories and the nominal name of the header.
+    virtual IncludeResult* readLocalPath(const char* headerName, const char* includerName, int depth)
+    {
+        // Discard popped include directories, and
+        // initialize when at parse-time first level.
+        directoryStack.resize(depth + externalLocalDirectoryCount);
+        if (depth == 1)
+            directoryStack.back() = getDirectory(includerName);
+
+        // Find a directory that works, using a reverse search of the include stack.
+        for (auto it = directoryStack.rbegin(); it != directoryStack.rend(); ++it) {
+            std::string path = *it + '/' + headerName;
+            std::replace(path.begin(), path.end(), '\\', '/');
+            std::ifstream file(path, std::ios_base::binary | std::ios_base::ate);
+            if (file) {
+                directoryStack.push_back(getDirectory(path));
+                return newIncludeResult(path, file, (int)file.tellg());
+            }
+        }
+
+        return nullptr;
+    }
+
+    // Search for a valid <system> path.
+    // Not implemented yet; returning nullptr signals failure to find.
+    virtual IncludeResult* readSystemPath(const char* /*headerName*/) const
+    {
+        return nullptr;
+    }
+
+    // Do actual reading of the file, filling in a new include result.
+    virtual IncludeResult* newIncludeResult(const std::string& path, std::ifstream& file, int length) const
+    {
+        char* content = new tUserDataElement [length];
+        file.seekg(0, file.beg);
+        file.read(content, length);
+        return new IncludeResult(path, content, length, content);
+    }
+
+    // If no path markers, return current working directory.
+    // Otherwise, strip file name and return path leading up to it.
+    virtual std::string getDirectory(const std::string path) const
+    {
+        size_t last = path.find_last_of("/\\");
+        return last == std::string::npos ? "." : path.substr(0, last);
+    }
+};
diff --git a/thirdparty/glslang/StandAlone/ResourceLimits.cpp b/thirdparty/glslang/StandAlone/ResourceLimits.cpp
new file mode 100644
index 00000000000..7c7f4c4e49b
--- /dev/null
+++ b/thirdparty/glslang/StandAlone/ResourceLimits.cpp
@@ -0,0 +1,496 @@
+//
+// Copyright (C) 2016 Google, Inc.
+//
+// All rights reserved.
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// +// Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// +// Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// +// Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +// POSSIBILITY OF SUCH DAMAGE. + +#include +#include +#include +#include + +#include "ResourceLimits.h" + +namespace glslang { + +const TBuiltInResource DefaultTBuiltInResource = { + /* .MaxLights = */ 32, + /* .MaxClipPlanes = */ 6, + /* .MaxTextureUnits = */ 32, + /* .MaxTextureCoords = */ 32, + /* .MaxVertexAttribs = */ 64, + /* .MaxVertexUniformComponents = */ 4096, + /* .MaxVaryingFloats = */ 64, + /* .MaxVertexTextureImageUnits = */ 32, + /* .MaxCombinedTextureImageUnits = */ 80, + /* .MaxTextureImageUnits = */ 32, + /* .MaxFragmentUniformComponents = */ 4096, + /* .MaxDrawBuffers = */ 32, + /* .MaxVertexUniformVectors = */ 128, + /* .MaxVaryingVectors = */ 8, + /* .MaxFragmentUniformVectors = */ 16, + /* .MaxVertexOutputVectors = */ 16, + /* .MaxFragmentInputVectors = */ 15, + /* .MinProgramTexelOffset = */ -8, + /* .MaxProgramTexelOffset = */ 7, + /* .MaxClipDistances = */ 8, + /* .MaxComputeWorkGroupCountX = */ 65535, + /* .MaxComputeWorkGroupCountY = */ 65535, + /* .MaxComputeWorkGroupCountZ = */ 65535, + /* .MaxComputeWorkGroupSizeX = */ 1024, + /* .MaxComputeWorkGroupSizeY = */ 1024, + /* .MaxComputeWorkGroupSizeZ = */ 64, + /* .MaxComputeUniformComponents = */ 1024, + /* .MaxComputeTextureImageUnits = */ 16, + /* .MaxComputeImageUniforms = */ 8, + /* .MaxComputeAtomicCounters = */ 8, + /* .MaxComputeAtomicCounterBuffers = */ 1, + /* .MaxVaryingComponents = */ 60, + /* .MaxVertexOutputComponents = */ 64, + /* .MaxGeometryInputComponents = */ 64, + /* .MaxGeometryOutputComponents = */ 128, + /* .MaxFragmentInputComponents = */ 128, + /* .MaxImageUnits = */ 8, + /* .MaxCombinedImageUnitsAndFragmentOutputs = */ 8, + /* .MaxCombinedShaderOutputResources = */ 8, + /* .MaxImageSamples = */ 0, + /* .MaxVertexImageUniforms = */ 0, + /* .MaxTessControlImageUniforms = */ 0, + /* .MaxTessEvaluationImageUniforms = */ 0, + /* .MaxGeometryImageUniforms = */ 0, + /* .MaxFragmentImageUniforms = */ 8, + /* .MaxCombinedImageUniforms = */ 8, + /* .MaxGeometryTextureImageUnits = */ 16, + /* .MaxGeometryOutputVertices = */ 256, + /* 
.MaxGeometryTotalOutputComponents = */ 1024, + /* .MaxGeometryUniformComponents = */ 1024, + /* .MaxGeometryVaryingComponents = */ 64, + /* .MaxTessControlInputComponents = */ 128, + /* .MaxTessControlOutputComponents = */ 128, + /* .MaxTessControlTextureImageUnits = */ 16, + /* .MaxTessControlUniformComponents = */ 1024, + /* .MaxTessControlTotalOutputComponents = */ 4096, + /* .MaxTessEvaluationInputComponents = */ 128, + /* .MaxTessEvaluationOutputComponents = */ 128, + /* .MaxTessEvaluationTextureImageUnits = */ 16, + /* .MaxTessEvaluationUniformComponents = */ 1024, + /* .MaxTessPatchComponents = */ 120, + /* .MaxPatchVertices = */ 32, + /* .MaxTessGenLevel = */ 64, + /* .MaxViewports = */ 16, + /* .MaxVertexAtomicCounters = */ 0, + /* .MaxTessControlAtomicCounters = */ 0, + /* .MaxTessEvaluationAtomicCounters = */ 0, + /* .MaxGeometryAtomicCounters = */ 0, + /* .MaxFragmentAtomicCounters = */ 8, + /* .MaxCombinedAtomicCounters = */ 8, + /* .MaxAtomicCounterBindings = */ 1, + /* .MaxVertexAtomicCounterBuffers = */ 0, + /* .MaxTessControlAtomicCounterBuffers = */ 0, + /* .MaxTessEvaluationAtomicCounterBuffers = */ 0, + /* .MaxGeometryAtomicCounterBuffers = */ 0, + /* .MaxFragmentAtomicCounterBuffers = */ 1, + /* .MaxCombinedAtomicCounterBuffers = */ 1, + /* .MaxAtomicCounterBufferSize = */ 16384, + /* .MaxTransformFeedbackBuffers = */ 4, + /* .MaxTransformFeedbackInterleavedComponents = */ 64, + /* .MaxCullDistances = */ 8, + /* .MaxCombinedClipAndCullDistances = */ 8, + /* .MaxSamples = */ 4, + /* .maxMeshOutputVerticesNV = */ 256, + /* .maxMeshOutputPrimitivesNV = */ 512, + /* .maxMeshWorkGroupSizeX_NV = */ 32, + /* .maxMeshWorkGroupSizeY_NV = */ 1, + /* .maxMeshWorkGroupSizeZ_NV = */ 1, + /* .maxTaskWorkGroupSizeX_NV = */ 32, + /* .maxTaskWorkGroupSizeY_NV = */ 1, + /* .maxTaskWorkGroupSizeZ_NV = */ 1, + /* .maxMeshViewCountNV = */ 4, + /* .maxDualSourceDrawBuffersEXT = */ 1, + + /* .limits = */ { + /* .nonInductiveForLoops = */ 1, + /* .whileLoops = */ 1, + /* .doWhileLoops = */ 1, + /* .generalUniformIndexing = */ 1, + /* .generalAttributeMatrixVectorIndexing = */ 1, + /* .generalVaryingIndexing = */ 1, + /* .generalSamplerIndexing = */ 1, + /* .generalVariableIndexing = */ 1, + /* .generalConstantMatrixVectorIndexing = */ 1, + }}; + +std::string GetDefaultTBuiltInResourceString() +{ + std::ostringstream ostream; + + ostream << "MaxLights " << DefaultTBuiltInResource.maxLights << "\n" + << "MaxClipPlanes " << DefaultTBuiltInResource.maxClipPlanes << "\n" + << "MaxTextureUnits " << DefaultTBuiltInResource.maxTextureUnits << "\n" + << "MaxTextureCoords " << DefaultTBuiltInResource.maxTextureCoords << "\n" + << "MaxVertexAttribs " << DefaultTBuiltInResource.maxVertexAttribs << "\n" + << "MaxVertexUniformComponents " << DefaultTBuiltInResource.maxVertexUniformComponents << "\n" + << "MaxVaryingFloats " << DefaultTBuiltInResource.maxVaryingFloats << "\n" + << "MaxVertexTextureImageUnits " << DefaultTBuiltInResource.maxVertexTextureImageUnits << "\n" + << "MaxCombinedTextureImageUnits " << DefaultTBuiltInResource.maxCombinedTextureImageUnits << "\n" + << "MaxTextureImageUnits " << DefaultTBuiltInResource.maxTextureImageUnits << "\n" + << "MaxFragmentUniformComponents " << DefaultTBuiltInResource.maxFragmentUniformComponents << "\n" + << "MaxDrawBuffers " << DefaultTBuiltInResource.maxDrawBuffers << "\n" + << "MaxVertexUniformVectors " << DefaultTBuiltInResource.maxVertexUniformVectors << "\n" + << "MaxVaryingVectors " << DefaultTBuiltInResource.maxVaryingVectors << "\n" + << 
"MaxFragmentUniformVectors " << DefaultTBuiltInResource.maxFragmentUniformVectors << "\n" + << "MaxVertexOutputVectors " << DefaultTBuiltInResource.maxVertexOutputVectors << "\n" + << "MaxFragmentInputVectors " << DefaultTBuiltInResource.maxFragmentInputVectors << "\n" + << "MinProgramTexelOffset " << DefaultTBuiltInResource.minProgramTexelOffset << "\n" + << "MaxProgramTexelOffset " << DefaultTBuiltInResource.maxProgramTexelOffset << "\n" + << "MaxClipDistances " << DefaultTBuiltInResource.maxClipDistances << "\n" + << "MaxComputeWorkGroupCountX " << DefaultTBuiltInResource.maxComputeWorkGroupCountX << "\n" + << "MaxComputeWorkGroupCountY " << DefaultTBuiltInResource.maxComputeWorkGroupCountY << "\n" + << "MaxComputeWorkGroupCountZ " << DefaultTBuiltInResource.maxComputeWorkGroupCountZ << "\n" + << "MaxComputeWorkGroupSizeX " << DefaultTBuiltInResource.maxComputeWorkGroupSizeX << "\n" + << "MaxComputeWorkGroupSizeY " << DefaultTBuiltInResource.maxComputeWorkGroupSizeY << "\n" + << "MaxComputeWorkGroupSizeZ " << DefaultTBuiltInResource.maxComputeWorkGroupSizeZ << "\n" + << "MaxComputeUniformComponents " << DefaultTBuiltInResource.maxComputeUniformComponents << "\n" + << "MaxComputeTextureImageUnits " << DefaultTBuiltInResource.maxComputeTextureImageUnits << "\n" + << "MaxComputeImageUniforms " << DefaultTBuiltInResource.maxComputeImageUniforms << "\n" + << "MaxComputeAtomicCounters " << DefaultTBuiltInResource.maxComputeAtomicCounters << "\n" + << "MaxComputeAtomicCounterBuffers " << DefaultTBuiltInResource.maxComputeAtomicCounterBuffers << "\n" + << "MaxVaryingComponents " << DefaultTBuiltInResource.maxVaryingComponents << "\n" + << "MaxVertexOutputComponents " << DefaultTBuiltInResource.maxVertexOutputComponents << "\n" + << "MaxGeometryInputComponents " << DefaultTBuiltInResource.maxGeometryInputComponents << "\n" + << "MaxGeometryOutputComponents " << DefaultTBuiltInResource.maxGeometryOutputComponents << "\n" + << "MaxFragmentInputComponents " << DefaultTBuiltInResource.maxFragmentInputComponents << "\n" + << "MaxImageUnits " << DefaultTBuiltInResource.maxImageUnits << "\n" + << "MaxCombinedImageUnitsAndFragmentOutputs " << DefaultTBuiltInResource.maxCombinedImageUnitsAndFragmentOutputs << "\n" + << "MaxCombinedShaderOutputResources " << DefaultTBuiltInResource.maxCombinedShaderOutputResources << "\n" + << "MaxImageSamples " << DefaultTBuiltInResource.maxImageSamples << "\n" + << "MaxVertexImageUniforms " << DefaultTBuiltInResource.maxVertexImageUniforms << "\n" + << "MaxTessControlImageUniforms " << DefaultTBuiltInResource.maxTessControlImageUniforms << "\n" + << "MaxTessEvaluationImageUniforms " << DefaultTBuiltInResource.maxTessEvaluationImageUniforms << "\n" + << "MaxGeometryImageUniforms " << DefaultTBuiltInResource.maxGeometryImageUniforms << "\n" + << "MaxFragmentImageUniforms " << DefaultTBuiltInResource.maxFragmentImageUniforms << "\n" + << "MaxCombinedImageUniforms " << DefaultTBuiltInResource.maxCombinedImageUniforms << "\n" + << "MaxGeometryTextureImageUnits " << DefaultTBuiltInResource.maxGeometryTextureImageUnits << "\n" + << "MaxGeometryOutputVertices " << DefaultTBuiltInResource.maxGeometryOutputVertices << "\n" + << "MaxGeometryTotalOutputComponents " << DefaultTBuiltInResource.maxGeometryTotalOutputComponents << "\n" + << "MaxGeometryUniformComponents " << DefaultTBuiltInResource.maxGeometryUniformComponents << "\n" + << "MaxGeometryVaryingComponents " << DefaultTBuiltInResource.maxGeometryVaryingComponents << "\n" + << "MaxTessControlInputComponents " << 
DefaultTBuiltInResource.maxTessControlInputComponents << "\n" + << "MaxTessControlOutputComponents " << DefaultTBuiltInResource.maxTessControlOutputComponents << "\n" + << "MaxTessControlTextureImageUnits " << DefaultTBuiltInResource.maxTessControlTextureImageUnits << "\n" + << "MaxTessControlUniformComponents " << DefaultTBuiltInResource.maxTessControlUniformComponents << "\n" + << "MaxTessControlTotalOutputComponents " << DefaultTBuiltInResource.maxTessControlTotalOutputComponents << "\n" + << "MaxTessEvaluationInputComponents " << DefaultTBuiltInResource.maxTessEvaluationInputComponents << "\n" + << "MaxTessEvaluationOutputComponents " << DefaultTBuiltInResource.maxTessEvaluationOutputComponents << "\n" + << "MaxTessEvaluationTextureImageUnits " << DefaultTBuiltInResource.maxTessEvaluationTextureImageUnits << "\n" + << "MaxTessEvaluationUniformComponents " << DefaultTBuiltInResource.maxTessEvaluationUniformComponents << "\n" + << "MaxTessPatchComponents " << DefaultTBuiltInResource.maxTessPatchComponents << "\n" + << "MaxPatchVertices " << DefaultTBuiltInResource.maxPatchVertices << "\n" + << "MaxTessGenLevel " << DefaultTBuiltInResource.maxTessGenLevel << "\n" + << "MaxViewports " << DefaultTBuiltInResource.maxViewports << "\n" + << "MaxVertexAtomicCounters " << DefaultTBuiltInResource.maxVertexAtomicCounters << "\n" + << "MaxTessControlAtomicCounters " << DefaultTBuiltInResource.maxTessControlAtomicCounters << "\n" + << "MaxTessEvaluationAtomicCounters " << DefaultTBuiltInResource.maxTessEvaluationAtomicCounters << "\n" + << "MaxGeometryAtomicCounters " << DefaultTBuiltInResource.maxGeometryAtomicCounters << "\n" + << "MaxFragmentAtomicCounters " << DefaultTBuiltInResource.maxFragmentAtomicCounters << "\n" + << "MaxCombinedAtomicCounters " << DefaultTBuiltInResource.maxCombinedAtomicCounters << "\n" + << "MaxAtomicCounterBindings " << DefaultTBuiltInResource.maxAtomicCounterBindings << "\n" + << "MaxVertexAtomicCounterBuffers " << DefaultTBuiltInResource.maxVertexAtomicCounterBuffers << "\n" + << "MaxTessControlAtomicCounterBuffers " << DefaultTBuiltInResource.maxTessControlAtomicCounterBuffers << "\n" + << "MaxTessEvaluationAtomicCounterBuffers " << DefaultTBuiltInResource.maxTessEvaluationAtomicCounterBuffers << "\n" + << "MaxGeometryAtomicCounterBuffers " << DefaultTBuiltInResource.maxGeometryAtomicCounterBuffers << "\n" + << "MaxFragmentAtomicCounterBuffers " << DefaultTBuiltInResource.maxFragmentAtomicCounterBuffers << "\n" + << "MaxCombinedAtomicCounterBuffers " << DefaultTBuiltInResource.maxCombinedAtomicCounterBuffers << "\n" + << "MaxAtomicCounterBufferSize " << DefaultTBuiltInResource.maxAtomicCounterBufferSize << "\n" + << "MaxTransformFeedbackBuffers " << DefaultTBuiltInResource.maxTransformFeedbackBuffers << "\n" + << "MaxTransformFeedbackInterleavedComponents " << DefaultTBuiltInResource.maxTransformFeedbackInterleavedComponents << "\n" + << "MaxCullDistances " << DefaultTBuiltInResource.maxCullDistances << "\n" + << "MaxCombinedClipAndCullDistances " << DefaultTBuiltInResource.maxCombinedClipAndCullDistances << "\n" + << "MaxSamples " << DefaultTBuiltInResource.maxSamples << "\n" + << "MaxMeshOutputVerticesNV " << DefaultTBuiltInResource.maxMeshOutputVerticesNV << "\n" + << "MaxMeshOutputPrimitivesNV " << DefaultTBuiltInResource.maxMeshOutputPrimitivesNV << "\n" + << "MaxMeshWorkGroupSizeX_NV " << DefaultTBuiltInResource.maxMeshWorkGroupSizeX_NV << "\n" + << "MaxMeshWorkGroupSizeY_NV " << DefaultTBuiltInResource.maxMeshWorkGroupSizeY_NV << "\n" + << 
"MaxMeshWorkGroupSizeZ_NV " << DefaultTBuiltInResource.maxMeshWorkGroupSizeZ_NV << "\n" + << "MaxTaskWorkGroupSizeX_NV " << DefaultTBuiltInResource.maxTaskWorkGroupSizeX_NV << "\n" + << "MaxTaskWorkGroupSizeY_NV " << DefaultTBuiltInResource.maxTaskWorkGroupSizeY_NV << "\n" + << "MaxTaskWorkGroupSizeZ_NV " << DefaultTBuiltInResource.maxTaskWorkGroupSizeZ_NV << "\n" + << "MaxMeshViewCountNV " << DefaultTBuiltInResource.maxMeshViewCountNV << "\n" + << "MaxDualSourceDrawBuffersEXT " << DefaultTBuiltInResource.maxDualSourceDrawBuffersEXT << "\n" + << "nonInductiveForLoops " << DefaultTBuiltInResource.limits.nonInductiveForLoops << "\n" + << "whileLoops " << DefaultTBuiltInResource.limits.whileLoops << "\n" + << "doWhileLoops " << DefaultTBuiltInResource.limits.doWhileLoops << "\n" + << "generalUniformIndexing " << DefaultTBuiltInResource.limits.generalUniformIndexing << "\n" + << "generalAttributeMatrixVectorIndexing " << DefaultTBuiltInResource.limits.generalAttributeMatrixVectorIndexing << "\n" + << "generalVaryingIndexing " << DefaultTBuiltInResource.limits.generalVaryingIndexing << "\n" + << "generalSamplerIndexing " << DefaultTBuiltInResource.limits.generalSamplerIndexing << "\n" + << "generalVariableIndexing " << DefaultTBuiltInResource.limits.generalVariableIndexing << "\n" + << "generalConstantMatrixVectorIndexing " << DefaultTBuiltInResource.limits.generalConstantMatrixVectorIndexing << "\n" + ; + + return ostream.str(); +} + +void DecodeResourceLimits(TBuiltInResource* resources, char* config) +{ + static const char* delims = " \t\n\r"; + + size_t pos = 0; + std::string configStr(config); + + while ((pos = configStr.find_first_not_of(delims, pos)) != std::string::npos) { + const size_t token_s = pos; + const size_t token_e = configStr.find_first_of(delims, token_s); + const size_t value_s = configStr.find_first_not_of(delims, token_e); + const size_t value_e = configStr.find_first_of(delims, value_s); + pos = value_e; + + // Faster to use compare(), but prefering readability. + const std::string tokenStr = configStr.substr(token_s, token_e-token_s); + const std::string valueStr = configStr.substr(value_s, value_e-value_s); + + if (value_s == std::string::npos || ! (valueStr[0] == '-' || isdigit(valueStr[0]))) { + printf("Error: '%s' bad .conf file. 
Each name must be followed by one number.\n", + valueStr.c_str()); + return; + } + + const int value = std::atoi(valueStr.c_str()); + + if (tokenStr == "MaxLights") + resources->maxLights = value; + else if (tokenStr == "MaxClipPlanes") + resources->maxClipPlanes = value; + else if (tokenStr == "MaxTextureUnits") + resources->maxTextureUnits = value; + else if (tokenStr == "MaxTextureCoords") + resources->maxTextureCoords = value; + else if (tokenStr == "MaxVertexAttribs") + resources->maxVertexAttribs = value; + else if (tokenStr == "MaxVertexUniformComponents") + resources->maxVertexUniformComponents = value; + else if (tokenStr == "MaxVaryingFloats") + resources->maxVaryingFloats = value; + else if (tokenStr == "MaxVertexTextureImageUnits") + resources->maxVertexTextureImageUnits = value; + else if (tokenStr == "MaxCombinedTextureImageUnits") + resources->maxCombinedTextureImageUnits = value; + else if (tokenStr == "MaxTextureImageUnits") + resources->maxTextureImageUnits = value; + else if (tokenStr == "MaxFragmentUniformComponents") + resources->maxFragmentUniformComponents = value; + else if (tokenStr == "MaxDrawBuffers") + resources->maxDrawBuffers = value; + else if (tokenStr == "MaxVertexUniformVectors") + resources->maxVertexUniformVectors = value; + else if (tokenStr == "MaxVaryingVectors") + resources->maxVaryingVectors = value; + else if (tokenStr == "MaxFragmentUniformVectors") + resources->maxFragmentUniformVectors = value; + else if (tokenStr == "MaxVertexOutputVectors") + resources->maxVertexOutputVectors = value; + else if (tokenStr == "MaxFragmentInputVectors") + resources->maxFragmentInputVectors = value; + else if (tokenStr == "MinProgramTexelOffset") + resources->minProgramTexelOffset = value; + else if (tokenStr == "MaxProgramTexelOffset") + resources->maxProgramTexelOffset = value; + else if (tokenStr == "MaxClipDistances") + resources->maxClipDistances = value; + else if (tokenStr == "MaxComputeWorkGroupCountX") + resources->maxComputeWorkGroupCountX = value; + else if (tokenStr == "MaxComputeWorkGroupCountY") + resources->maxComputeWorkGroupCountY = value; + else if (tokenStr == "MaxComputeWorkGroupCountZ") + resources->maxComputeWorkGroupCountZ = value; + else if (tokenStr == "MaxComputeWorkGroupSizeX") + resources->maxComputeWorkGroupSizeX = value; + else if (tokenStr == "MaxComputeWorkGroupSizeY") + resources->maxComputeWorkGroupSizeY = value; + else if (tokenStr == "MaxComputeWorkGroupSizeZ") + resources->maxComputeWorkGroupSizeZ = value; + else if (tokenStr == "MaxComputeUniformComponents") + resources->maxComputeUniformComponents = value; + else if (tokenStr == "MaxComputeTextureImageUnits") + resources->maxComputeTextureImageUnits = value; + else if (tokenStr == "MaxComputeImageUniforms") + resources->maxComputeImageUniforms = value; + else if (tokenStr == "MaxComputeAtomicCounters") + resources->maxComputeAtomicCounters = value; + else if (tokenStr == "MaxComputeAtomicCounterBuffers") + resources->maxComputeAtomicCounterBuffers = value; + else if (tokenStr == "MaxVaryingComponents") + resources->maxVaryingComponents = value; + else if (tokenStr == "MaxVertexOutputComponents") + resources->maxVertexOutputComponents = value; + else if (tokenStr == "MaxGeometryInputComponents") + resources->maxGeometryInputComponents = value; + else if (tokenStr == "MaxGeometryOutputComponents") + resources->maxGeometryOutputComponents = value; + else if (tokenStr == "MaxFragmentInputComponents") + resources->maxFragmentInputComponents = value; + else if (tokenStr == 
"MaxImageUnits") + resources->maxImageUnits = value; + else if (tokenStr == "MaxCombinedImageUnitsAndFragmentOutputs") + resources->maxCombinedImageUnitsAndFragmentOutputs = value; + else if (tokenStr == "MaxCombinedShaderOutputResources") + resources->maxCombinedShaderOutputResources = value; + else if (tokenStr == "MaxImageSamples") + resources->maxImageSamples = value; + else if (tokenStr == "MaxVertexImageUniforms") + resources->maxVertexImageUniforms = value; + else if (tokenStr == "MaxTessControlImageUniforms") + resources->maxTessControlImageUniforms = value; + else if (tokenStr == "MaxTessEvaluationImageUniforms") + resources->maxTessEvaluationImageUniforms = value; + else if (tokenStr == "MaxGeometryImageUniforms") + resources->maxGeometryImageUniforms = value; + else if (tokenStr == "MaxFragmentImageUniforms") + resources->maxFragmentImageUniforms = value; + else if (tokenStr == "MaxCombinedImageUniforms") + resources->maxCombinedImageUniforms = value; + else if (tokenStr == "MaxGeometryTextureImageUnits") + resources->maxGeometryTextureImageUnits = value; + else if (tokenStr == "MaxGeometryOutputVertices") + resources->maxGeometryOutputVertices = value; + else if (tokenStr == "MaxGeometryTotalOutputComponents") + resources->maxGeometryTotalOutputComponents = value; + else if (tokenStr == "MaxGeometryUniformComponents") + resources->maxGeometryUniformComponents = value; + else if (tokenStr == "MaxGeometryVaryingComponents") + resources->maxGeometryVaryingComponents = value; + else if (tokenStr == "MaxTessControlInputComponents") + resources->maxTessControlInputComponents = value; + else if (tokenStr == "MaxTessControlOutputComponents") + resources->maxTessControlOutputComponents = value; + else if (tokenStr == "MaxTessControlTextureImageUnits") + resources->maxTessControlTextureImageUnits = value; + else if (tokenStr == "MaxTessControlUniformComponents") + resources->maxTessControlUniformComponents = value; + else if (tokenStr == "MaxTessControlTotalOutputComponents") + resources->maxTessControlTotalOutputComponents = value; + else if (tokenStr == "MaxTessEvaluationInputComponents") + resources->maxTessEvaluationInputComponents = value; + else if (tokenStr == "MaxTessEvaluationOutputComponents") + resources->maxTessEvaluationOutputComponents = value; + else if (tokenStr == "MaxTessEvaluationTextureImageUnits") + resources->maxTessEvaluationTextureImageUnits = value; + else if (tokenStr == "MaxTessEvaluationUniformComponents") + resources->maxTessEvaluationUniformComponents = value; + else if (tokenStr == "MaxTessPatchComponents") + resources->maxTessPatchComponents = value; + else if (tokenStr == "MaxPatchVertices") + resources->maxPatchVertices = value; + else if (tokenStr == "MaxTessGenLevel") + resources->maxTessGenLevel = value; + else if (tokenStr == "MaxViewports") + resources->maxViewports = value; + else if (tokenStr == "MaxVertexAtomicCounters") + resources->maxVertexAtomicCounters = value; + else if (tokenStr == "MaxTessControlAtomicCounters") + resources->maxTessControlAtomicCounters = value; + else if (tokenStr == "MaxTessEvaluationAtomicCounters") + resources->maxTessEvaluationAtomicCounters = value; + else if (tokenStr == "MaxGeometryAtomicCounters") + resources->maxGeometryAtomicCounters = value; + else if (tokenStr == "MaxFragmentAtomicCounters") + resources->maxFragmentAtomicCounters = value; + else if (tokenStr == "MaxCombinedAtomicCounters") + resources->maxCombinedAtomicCounters = value; + else if (tokenStr == "MaxAtomicCounterBindings") + 
resources->maxAtomicCounterBindings = value; + else if (tokenStr == "MaxVertexAtomicCounterBuffers") + resources->maxVertexAtomicCounterBuffers = value; + else if (tokenStr == "MaxTessControlAtomicCounterBuffers") + resources->maxTessControlAtomicCounterBuffers = value; + else if (tokenStr == "MaxTessEvaluationAtomicCounterBuffers") + resources->maxTessEvaluationAtomicCounterBuffers = value; + else if (tokenStr == "MaxGeometryAtomicCounterBuffers") + resources->maxGeometryAtomicCounterBuffers = value; + else if (tokenStr == "MaxFragmentAtomicCounterBuffers") + resources->maxFragmentAtomicCounterBuffers = value; + else if (tokenStr == "MaxCombinedAtomicCounterBuffers") + resources->maxCombinedAtomicCounterBuffers = value; + else if (tokenStr == "MaxAtomicCounterBufferSize") + resources->maxAtomicCounterBufferSize = value; + else if (tokenStr == "MaxTransformFeedbackBuffers") + resources->maxTransformFeedbackBuffers = value; + else if (tokenStr == "MaxTransformFeedbackInterleavedComponents") + resources->maxTransformFeedbackInterleavedComponents = value; + else if (tokenStr == "MaxCullDistances") + resources->maxCullDistances = value; + else if (tokenStr == "MaxCombinedClipAndCullDistances") + resources->maxCombinedClipAndCullDistances = value; + else if (tokenStr == "MaxSamples") + resources->maxSamples = value; + else if (tokenStr == "MaxMeshOutputVerticesNV") + resources->maxMeshOutputVerticesNV = value; + else if (tokenStr == "MaxMeshOutputPrimitivesNV") + resources->maxMeshOutputPrimitivesNV = value; + else if (tokenStr == "MaxMeshWorkGroupSizeX_NV") + resources->maxMeshWorkGroupSizeX_NV = value; + else if (tokenStr == "MaxMeshWorkGroupSizeY_NV") + resources->maxMeshWorkGroupSizeY_NV = value; + else if (tokenStr == "MaxMeshWorkGroupSizeZ_NV") + resources->maxMeshWorkGroupSizeZ_NV = value; + else if (tokenStr == "MaxTaskWorkGroupSizeX_NV") + resources->maxTaskWorkGroupSizeX_NV = value; + else if (tokenStr == "MaxTaskWorkGroupSizeY_NV") + resources->maxTaskWorkGroupSizeY_NV = value; + else if (tokenStr == "MaxTaskWorkGroupSizeZ_NV") + resources->maxTaskWorkGroupSizeZ_NV = value; + else if (tokenStr == "MaxMeshViewCountNV") + resources->maxMeshViewCountNV = value; + else if (tokenStr == "nonInductiveForLoops") + resources->limits.nonInductiveForLoops = (value != 0); + else if (tokenStr == "whileLoops") + resources->limits.whileLoops = (value != 0); + else if (tokenStr == "doWhileLoops") + resources->limits.doWhileLoops = (value != 0); + else if (tokenStr == "generalUniformIndexing") + resources->limits.generalUniformIndexing = (value != 0); + else if (tokenStr == "generalAttributeMatrixVectorIndexing") + resources->limits.generalAttributeMatrixVectorIndexing = (value != 0); + else if (tokenStr == "generalVaryingIndexing") + resources->limits.generalVaryingIndexing = (value != 0); + else if (tokenStr == "generalSamplerIndexing") + resources->limits.generalSamplerIndexing = (value != 0); + else if (tokenStr == "generalVariableIndexing") + resources->limits.generalVariableIndexing = (value != 0); + else if (tokenStr == "generalConstantMatrixVectorIndexing") + resources->limits.generalConstantMatrixVectorIndexing = (value != 0); + else + printf("Warning: unrecognized limit (%s) in configuration file.\n", tokenStr.c_str()); + + } +} + +} // end namespace glslang diff --git a/thirdparty/glslang/glslang/MachineIndependent/pch.cpp b/thirdparty/glslang/StandAlone/ResourceLimits.h similarity index 63% rename from thirdparty/glslang/glslang/MachineIndependent/pch.cpp rename to 
thirdparty/glslang/StandAlone/ResourceLimits.h index b7a08654a54..736248eb390 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/pch.cpp +++ b/thirdparty/glslang/StandAlone/ResourceLimits.h @@ -1,5 +1,6 @@ // -// Copyright (C) 2018 The Khronos Group Inc. +// Copyright (C) 2016 Google, Inc. +// // All rights reserved. // // Redistribution and use in source and binary forms, with or without @@ -14,7 +15,7 @@ // disclaimer in the documentation and/or other materials provided // with the distribution. // -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its +// Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // @@ -30,6 +31,27 @@ // LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN // ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. -// -#include "pch.h" +#ifndef _STAND_ALONE_RESOURCE_LIMITS_INCLUDED_ +#define _STAND_ALONE_RESOURCE_LIMITS_INCLUDED_ + +#include + +#include "../glslang/Include/ResourceLimits.h" + +namespace glslang { + +// These are the default resources for TBuiltInResources, used for both +// - parsing this string for the case where the user didn't supply one, +// - dumping out a template for user construction of a config file. +extern const TBuiltInResource DefaultTBuiltInResource; + +// Returns the DefaultTBuiltInResource as a human-readable string. +std::string GetDefaultTBuiltInResourceString(); + +// Decodes the resource limits from |config| to |resources|. +void DecodeResourceLimits(TBuiltInResource* resources, char* config); + +} // end namespace glslang + +#endif // _STAND_ALONE_RESOURCE_LIMITS_INCLUDED_ diff --git a/thirdparty/glslang/glslang/CInterface/glslang_c_interface.cpp b/thirdparty/glslang/glslang/CInterface/glslang_c_interface.cpp new file mode 100644 index 00000000000..2e04f53aceb --- /dev/null +++ b/thirdparty/glslang/glslang/CInterface/glslang_c_interface.cpp @@ -0,0 +1,428 @@ +/** + This code is based on the glslang_c_interface implementation by Viktor Latypov +**/ + +/** +BSD 2-Clause License + +Copyright (c) 2019, Viktor Latypov +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
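Before the new C interface sources begin, it is worth illustrating the resource-limits helpers declared above: GetDefaultTBuiltInResourceString() dumps DefaultTBuiltInResource as one "Name value" pair per line, and DecodeResourceLimits() parses that same text format back into a TBuiltInResource. A minimal sketch of how an embedder could combine them (the override values are illustrative, not taken from this patch, and it assumes the thirdparty/glslang directories are on the include path):

#include <string>
#include "StandAlone/ResourceLimits.h"

// Start from the bundled defaults and tweak a couple of limits using the
// "Name value" text format understood by DecodeResourceLimits().
TBuiltInResource make_custom_limits() {
    TBuiltInResource res = glslang::DefaultTBuiltInResource;
    std::string conf = "MaxDrawBuffers 8\nMaxSamples 8\n"; // hypothetical overrides
    glslang::DecodeResourceLimits(&res, &conf[0]);         // takes a mutable char*
    return res;
}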
+**/ + +#include "glslang/Include/glslang_c_interface.h" + +#include "StandAlone/DirStackFileIncluder.h" +#include "StandAlone/ResourceLimits.h" +#include "glslang/Include/ShHandle.h" + +#include "glslang/Include/ResourceLimits.h" +#include "glslang/MachineIndependent/Versions.h" + +static_assert(int(GLSLANG_STAGE_COUNT) == EShLangCount, ""); +static_assert(int(GLSLANG_STAGE_MASK_COUNT) == EShLanguageMaskCount, ""); +static_assert(int(GLSLANG_SOURCE_COUNT) == glslang::EShSourceCount, ""); +static_assert(int(GLSLANG_CLIENT_COUNT) == glslang::EShClientCount, ""); +static_assert(int(GLSLANG_TARGET_COUNT) == glslang::EShTargetCount, ""); +static_assert(int(GLSLANG_TARGET_CLIENT_VERSION_COUNT) == glslang::EShTargetClientVersionCount, ""); +static_assert(int(GLSLANG_TARGET_LANGUAGE_VERSION_COUNT) == glslang::EShTargetLanguageVersionCount, ""); +static_assert(int(GLSLANG_OPT_LEVEL_COUNT) == EshOptLevelCount, ""); +static_assert(int(GLSLANG_TEX_SAMP_TRANS_COUNT) == EShTexSampTransCount, ""); +static_assert(int(GLSLANG_MSG_COUNT) == EShMsgCount, ""); +static_assert(int(GLSLANG_REFLECTION_COUNT) == EShReflectionCount, ""); +static_assert(int(GLSLANG_PROFILE_COUNT) == EProfileCount, ""); +static_assert(sizeof(glslang_limits_t) == sizeof(TLimits), ""); +static_assert(sizeof(glslang_resource_t) == sizeof(TBuiltInResource), ""); + +typedef struct glslang_shader_s { + glslang::TShader* shader; + std::string preprocessedGLSL; +} glslang_shader_t; + +typedef struct glslang_program_s { + glslang::TProgram* program; + std::vector<unsigned int> spirv; + std::string loggerMessages; +} glslang_program_t; + +/* Wrapper/Adapter for C glsl_include_callbacks_t functions + + This class contains a 'glsl_include_callbacks_t' structure + with C include_local/include_system callback pointers. + + This class implement TShader::Includer interface + by redirecting C++ virtual methods to C callbacks. 
+ + The 'IncludeResult' instances produced by this Includer + contain a reference to glsl_include_result_t C structure + to allow its lifetime management by another C callback + (CallbackIncluder::callbacks::free_include_result) +*/ +class CallbackIncluder : public glslang::TShader::Includer { +public: + /* Wrapper of IncludeResult which stores a glsl_include_result object internally */ + class CallbackIncludeResult : public glslang::TShader::Includer::IncludeResult { + public: + CallbackIncludeResult(const std::string& headerName, const char* const headerData, const size_t headerLength, + void* userData, glsl_include_result_t* includeResult) + : glslang::TShader::Includer::IncludeResult(headerName, headerData, headerLength, userData), + includeResult(includeResult) + { + } + + virtual ~CallbackIncludeResult() {} + + protected: + friend class CallbackIncluder; + + glsl_include_result_t* includeResult; + }; + +public: + CallbackIncluder(glsl_include_callbacks_t _callbacks, void* _context) : callbacks(_callbacks), context(_context) {} + + virtual ~CallbackIncluder() {} + + virtual IncludeResult* includeSystem(const char* headerName, const char* includerName, + size_t inclusionDepth) override + { + if (this->callbacks.include_system) { + glsl_include_result_t* result = + this->callbacks.include_system(this->context, headerName, includerName, inclusionDepth); + + return new CallbackIncludeResult(std::string(headerName), result->header_data, result->header_length, + nullptr, result); + } + + return glslang::TShader::Includer::includeSystem(headerName, includerName, inclusionDepth); + } + + virtual IncludeResult* includeLocal(const char* headerName, const char* includerName, + size_t inclusionDepth) override + { + if (this->callbacks.include_local) { + glsl_include_result_t* result = + this->callbacks.include_local(this->context, headerName, includerName, inclusionDepth); + + return new CallbackIncludeResult(std::string(headerName), result->header_data, result->header_length, + nullptr, result); + } + + return glslang::TShader::Includer::includeLocal(headerName, includerName, inclusionDepth); + } + + /* This function only calls free_include_result callback + when the IncludeResult instance is allocated by a C function */ + virtual void releaseInclude(IncludeResult* result) override + { + if (result == nullptr) + return; + + if (this->callbacks.free_include_result && (result->userData == nullptr)) { + CallbackIncludeResult* innerResult = static_cast<CallbackIncludeResult*>(result); + /* use internal free() function */ + this->callbacks.free_include_result(this->context, innerResult->includeResult); + /* ignore internal fields of TShader::Includer::IncludeResult */ + delete result; + return; + } + + delete[] static_cast<char*>(result->userData); + delete result; + } + +private: + CallbackIncluder() {} + + /* C callback pointers */ + glsl_include_callbacks_t callbacks; + /* User-defined context */ + void* context; +}; + +GLSLANG_EXPORT int glslang_initialize_process() { return static_cast<int>(glslang::InitializeProcess()); } + +GLSLANG_EXPORT void glslang_finalize_process() { glslang::FinalizeProcess(); } + +static EShLanguage c_shader_stage(glslang_stage_t stage) +{ + switch (stage) { + case GLSLANG_STAGE_VERTEX: + return EShLangVertex; + case GLSLANG_STAGE_TESSCONTROL: + return EShLangTessControl; + case GLSLANG_STAGE_TESSEVALUATION: + return EShLangTessEvaluation; + case GLSLANG_STAGE_GEOMETRY: + return EShLangGeometry; + case GLSLANG_STAGE_FRAGMENT: + return EShLangFragment; + case GLSLANG_STAGE_COMPUTE: + return EShLangCompute; 
+ case GLSLANG_STAGE_RAYGEN_NV: + return EShLangRayGen; + case GLSLANG_STAGE_INTERSECT_NV: + return EShLangIntersect; + case GLSLANG_STAGE_ANYHIT_NV: + return EShLangAnyHit; + case GLSLANG_STAGE_CLOSESTHIT_NV: + return EShLangClosestHit; + case GLSLANG_STAGE_MISS_NV: + return EShLangMiss; + case GLSLANG_STAGE_CALLABLE_NV: + return EShLangCallable; + case GLSLANG_STAGE_TASK_NV: + return EShLangTaskNV; + case GLSLANG_STAGE_MESH_NV: + return EShLangMeshNV; + default: + break; + } + return EShLangCount; +} + +static int c_shader_messages(glslang_messages_t messages) +{ +#define CONVERT_MSG(in, out) \ + if ((messages & in) == in) \ + res |= out; + + int res = 0; + + CONVERT_MSG(GLSLANG_MSG_RELAXED_ERRORS_BIT, EShMsgRelaxedErrors); + CONVERT_MSG(GLSLANG_MSG_SUPPRESS_WARNINGS_BIT, EShMsgSuppressWarnings); + CONVERT_MSG(GLSLANG_MSG_AST_BIT, EShMsgAST); + CONVERT_MSG(GLSLANG_MSG_SPV_RULES_BIT, EShMsgSpvRules); + CONVERT_MSG(GLSLANG_MSG_VULKAN_RULES_BIT, EShMsgVulkanRules); + CONVERT_MSG(GLSLANG_MSG_ONLY_PREPROCESSOR_BIT, EShMsgOnlyPreprocessor); + CONVERT_MSG(GLSLANG_MSG_READ_HLSL_BIT, EShMsgReadHlsl); + CONVERT_MSG(GLSLANG_MSG_CASCADING_ERRORS_BIT, EShMsgCascadingErrors); + CONVERT_MSG(GLSLANG_MSG_KEEP_UNCALLED_BIT, EShMsgKeepUncalled); + CONVERT_MSG(GLSLANG_MSG_HLSL_OFFSETS_BIT, EShMsgHlslOffsets); + CONVERT_MSG(GLSLANG_MSG_DEBUG_INFO_BIT, EShMsgDebugInfo); + CONVERT_MSG(GLSLANG_MSG_HLSL_ENABLE_16BIT_TYPES_BIT, EShMsgHlslEnable16BitTypes); + CONVERT_MSG(GLSLANG_MSG_HLSL_LEGALIZATION_BIT, EShMsgHlslLegalization); + CONVERT_MSG(GLSLANG_MSG_HLSL_DX9_COMPATIBLE_BIT, EShMsgHlslDX9Compatible); + CONVERT_MSG(GLSLANG_MSG_BUILTIN_SYMBOL_TABLE_BIT, EShMsgBuiltinSymbolTable); + return res; +#undef CONVERT_MSG +} + +static glslang::EShTargetLanguageVersion +c_shader_target_language_version(glslang_target_language_version_t target_language_version) +{ + switch (target_language_version) { + case GLSLANG_TARGET_SPV_1_0: + return glslang::EShTargetSpv_1_0; + case GLSLANG_TARGET_SPV_1_1: + return glslang::EShTargetSpv_1_1; + case GLSLANG_TARGET_SPV_1_2: + return glslang::EShTargetSpv_1_2; + case GLSLANG_TARGET_SPV_1_3: + return glslang::EShTargetSpv_1_3; + case GLSLANG_TARGET_SPV_1_4: + return glslang::EShTargetSpv_1_4; + case GLSLANG_TARGET_SPV_1_5: + return glslang::EShTargetSpv_1_5; + default: + break; + } + return glslang::EShTargetSpv_1_0; +} + +static glslang::EShClient c_shader_client(glslang_client_t client) +{ + switch (client) { + case GLSLANG_CLIENT_VULKAN: + return glslang::EShClientVulkan; + case GLSLANG_CLIENT_OPENGL: + return glslang::EShClientOpenGL; + default: + break; + } + + return glslang::EShClientNone; +} + +static glslang::EShTargetClientVersion c_shader_client_version(glslang_target_client_version_t client_version) +{ + switch (client_version) { + case GLSLANG_TARGET_VULKAN_1_1: + return glslang::EShTargetVulkan_1_1; + case GLSLANG_TARGET_OPENGL_450: + return glslang::EShTargetOpenGL_450; + default: + break; + } + + return glslang::EShTargetVulkan_1_0; +} + +static glslang::EShTargetLanguage c_shader_target_language(glslang_target_language_t target_language) +{ + if (target_language == GLSLANG_TARGET_NONE) + return glslang::EShTargetNone; + + return glslang::EShTargetSpv; +} + +static glslang::EShSource c_shader_source(glslang_source_t source) +{ + switch (source) { + case GLSLANG_SOURCE_GLSL: + return glslang::EShSourceGlsl; + case GLSLANG_SOURCE_HLSL: + return glslang::EShSourceHlsl; + default: + break; + } + + return glslang::EShSourceNone; +} + +static EProfile 
c_shader_profile(glslang_profile_t profile) +{ + switch (profile) { + case GLSLANG_BAD_PROFILE: + return EBadProfile; + case GLSLANG_NO_PROFILE: + return ENoProfile; + case GLSLANG_CORE_PROFILE: + return ECoreProfile; + case GLSLANG_COMPATIBILITY_PROFILE: + return ECompatibilityProfile; + case GLSLANG_ES_PROFILE: + return EEsProfile; + case GLSLANG_PROFILE_COUNT: // Should not use this + break; + } + + return EProfile(); +} + +GLSLANG_EXPORT glslang_shader_t* glslang_shader_create(const glslang_input_t* input) +{ + if (!input || !input->code) { + printf("Error creating shader: null input(%p)/input->code\n", input); + + if (input) + printf("input->code = %p\n", input->code); + + return nullptr; + } + + glslang_shader_t* shader = new glslang_shader_t(); + + shader->shader = new glslang::TShader(c_shader_stage(input->stage)); + shader->shader->setStrings(&input->code, 1); + shader->shader->setEnvInput(c_shader_source(input->language), c_shader_stage(input->stage), + c_shader_client(input->client), input->default_version); + shader->shader->setEnvClient(c_shader_client(input->client), c_shader_client_version(input->client_version)); + shader->shader->setEnvTarget(c_shader_target_language(input->target_language), + c_shader_target_language_version(input->target_language_version)); + + return shader; +} + +GLSLANG_EXPORT const char* glslang_shader_get_preprocessed_code(glslang_shader_t* shader) +{ + return shader->preprocessedGLSL.c_str(); +} + +GLSLANG_EXPORT int glslang_shader_preprocess(glslang_shader_t* shader, const glslang_input_t* input) +{ + DirStackFileIncluder Includer; + /* TODO: use custom callbacks if they are available in 'i->callbacks' */ + return shader->shader->preprocess( + reinterpret_cast<const TBuiltInResource*>(input->resource), + input->default_version, + c_shader_profile(input->default_profile), + input->force_default_version_and_profile != 0, + input->forward_compatible != 0, + (EShMessages)c_shader_messages(input->messages), + &shader->preprocessedGLSL, + Includer + ); +} + +GLSLANG_EXPORT int glslang_shader_parse(glslang_shader_t* shader, const glslang_input_t* input) +{ + const char* preprocessedCStr = shader->preprocessedGLSL.c_str(); + shader->shader->setStrings(&preprocessedCStr, 1); + + return shader->shader->parse( + reinterpret_cast<const TBuiltInResource*>(input->resource), + input->default_version, + input->forward_compatible != 0, + (EShMessages)c_shader_messages(input->messages) + ); +} + +GLSLANG_EXPORT const char* glslang_shader_get_info_log(glslang_shader_t* shader) { return shader->shader->getInfoLog(); } + +GLSLANG_EXPORT const char* glslang_shader_get_info_debug_log(glslang_shader_t* shader) { return shader->shader->getInfoDebugLog(); } + +GLSLANG_EXPORT void glslang_shader_delete(glslang_shader_t* shader) +{ + if (!shader) + return; + + delete (shader->shader); + delete (shader); +} + +GLSLANG_EXPORT glslang_program_t* glslang_program_create() +{ + glslang_program_t* p = new glslang_program_t(); + p->program = new glslang::TProgram(); + return p; +} + +GLSLANG_EXPORT void glslang_program_delete(glslang_program_t* program) +{ + if (!program) + return; + + delete (program->program); + delete (program); +} + +GLSLANG_EXPORT void glslang_program_add_shader(glslang_program_t* program, glslang_shader_t* shader) +{ + program->program->addShader(shader->shader); +} + +GLSLANG_EXPORT int glslang_program_link(glslang_program_t* program, int messages) +{ + return (int)program->program->link((EShMessages)messages); +} + +GLSLANG_EXPORT const char* glslang_program_get_info_log(glslang_program_t* program) +{ + 
return program->program->getInfoLog(); +} + +GLSLANG_EXPORT const char* glslang_program_get_info_debug_log(glslang_program_t* program) +{ + return program->program->getInfoDebugLog(); +} diff --git a/thirdparty/glslang/glslang/Include/BaseTypes.h b/thirdparty/glslang/glslang/Include/BaseTypes.h index 6d4b4ff8e37..b69eaebf2d7 100644 --- a/thirdparty/glslang/glslang/Include/BaseTypes.h +++ b/thirdparty/glslang/glslang/Include/BaseTypes.h @@ -2,6 +2,7 @@ // Copyright (C) 2002-2005 3Dlabs Inc. Ltd. // Copyright (C) 2012-2013 LunarG, Inc. // Copyright (C) 2017 ARM Limited. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. // @@ -61,8 +62,9 @@ enum TBasicType { EbtSampler, EbtStruct, EbtBlock, - EbtAccStructNV, + EbtAccStruct, EbtReference, + EbtRayQuery, // HLSL types that live only temporarily. EbtString, @@ -90,11 +92,11 @@ enum TStorageQualifier { EvqBuffer, // read/write, shared with app EvqShared, // compute shader's read/write 'shared' qualifier - EvqPayloadNV, - EvqPayloadInNV, - EvqHitAttrNV, - EvqCallableDataNV, - EvqCallableDataInNV, + EvqPayload, + EvqPayloadIn, + EvqHitAttr, + EvqCallableData, + EvqCallableDataIn, // parameters EvqIn, // also, for 'in' in the grammar before we know if it's a pipeline input or an 'in' parameter @@ -229,6 +231,9 @@ enum TBuiltInVariable { EbvFragSizeEXT, EbvFragInvocationCountEXT, + EbvSecondaryFragDataEXT, + EbvSecondaryFragColorEXT, + EbvViewportMaskNV, EbvSecondaryPositionNV, EbvSecondaryViewportMaskNV, @@ -238,20 +243,23 @@ enum TBuiltInVariable { EbvFragmentSizeNV, EbvInvocationsPerPixelNV, // ray tracing - EbvLaunchIdNV, - EbvLaunchSizeNV, - EbvInstanceCustomIndexNV, - EbvWorldRayOriginNV, - EbvWorldRayDirectionNV, - EbvObjectRayOriginNV, - EbvObjectRayDirectionNV, - EbvRayTminNV, - EbvRayTmaxNV, - EbvHitTNV, - EbvHitKindNV, - EbvObjectToWorldNV, - EbvWorldToObjectNV, - EbvIncomingRayFlagsNV, + EbvLaunchId, + EbvLaunchSize, + EbvInstanceCustomIndex, + EbvGeometryIndex, + EbvWorldRayOrigin, + EbvWorldRayDirection, + EbvObjectRayOrigin, + EbvObjectRayDirection, + EbvRayTmin, + EbvRayTmax, + EbvHitT, + EbvHitKind, + EbvObjectToWorld, + EbvObjectToWorld3x4, + EbvWorldToObject, + EbvWorldToObject3x4, + EbvIncomingRayFlags, // barycentrics EbvBaryCoordNV, EbvBaryCoordNoPerspNV, @@ -328,11 +336,11 @@ __inline const char* GetStorageQualifierString(TStorageQualifier q) case EvqPointCoord: return "gl_PointCoord"; break; case EvqFragColor: return "fragColor"; break; case EvqFragDepth: return "gl_FragDepth"; break; - case EvqPayloadNV: return "rayPayloadNV"; break; - case EvqPayloadInNV: return "rayPayloadInNV"; break; - case EvqHitAttrNV: return "hitAttributeNV"; break; - case EvqCallableDataNV: return "callableDataNV"; break; - case EvqCallableDataInNV: return "callableDataInNV"; break; + case EvqPayload: return "rayPayloadNV"; break; + case EvqPayloadIn: return "rayPayloadInNV"; break; + case EvqHitAttr: return "hitAttributeNV"; break; + case EvqCallableData: return "callableDataNV"; break; + case EvqCallableDataIn: return "callableDataInNV"; break; default: return "unknown qualifier"; } } @@ -428,6 +436,9 @@ __inline const char* GetBuiltInVariableString(TBuiltInVariable v) case EbvFragSizeEXT: return "FragSizeEXT"; case EbvFragInvocationCountEXT: return "FragInvocationCountEXT"; + case EbvSecondaryFragDataEXT: return "SecondaryFragDataEXT"; + case EbvSecondaryFragColorEXT: return "SecondaryFragColorEXT"; + case EbvViewportMaskNV: return "ViewportMaskNV"; case EbvSecondaryPositionNV: return 
"SecondaryPositionNV"; case EbvSecondaryViewportMaskNV: return "SecondaryViewportMaskNV"; @@ -436,20 +447,21 @@ __inline const char* GetBuiltInVariableString(TBuiltInVariable v) case EbvFragFullyCoveredNV: return "FragFullyCoveredNV"; case EbvFragmentSizeNV: return "FragmentSizeNV"; case EbvInvocationsPerPixelNV: return "InvocationsPerPixelNV"; - case EbvLaunchIdNV: return "LaunchIdNV"; - case EbvLaunchSizeNV: return "LaunchSizeNV"; - case EbvInstanceCustomIndexNV: return "InstanceCustomIndexNV"; - case EbvWorldRayOriginNV: return "WorldRayOriginNV"; - case EbvWorldRayDirectionNV: return "WorldRayDirectionNV"; - case EbvObjectRayOriginNV: return "ObjectRayOriginNV"; - case EbvObjectRayDirectionNV: return "ObjectRayDirectionNV"; - case EbvRayTminNV: return "ObjectRayTminNV"; - case EbvRayTmaxNV: return "ObjectRayTmaxNV"; - case EbvHitTNV: return "HitTNV"; - case EbvHitKindNV: return "HitKindNV"; - case EbvIncomingRayFlagsNV: return "IncomingRayFlagsNV"; - case EbvObjectToWorldNV: return "ObjectToWorldNV"; - case EbvWorldToObjectNV: return "WorldToObjectNV"; + case EbvLaunchId: return "LaunchIdNV"; + case EbvLaunchSize: return "LaunchSizeNV"; + case EbvInstanceCustomIndex: return "InstanceCustomIndexNV"; + case EbvGeometryIndex: return "GeometryIndexEXT"; + case EbvWorldRayOrigin: return "WorldRayOriginNV"; + case EbvWorldRayDirection: return "WorldRayDirectionNV"; + case EbvObjectRayOrigin: return "ObjectRayOriginNV"; + case EbvObjectRayDirection: return "ObjectRayDirectionNV"; + case EbvRayTmin: return "ObjectRayTminNV"; + case EbvRayTmax: return "ObjectRayTmaxNV"; + case EbvHitT: return "HitTNV"; + case EbvHitKind: return "HitKindNV"; + case EbvIncomingRayFlags: return "IncomingRayFlagsNV"; + case EbvObjectToWorld: return "ObjectToWorldNV"; + case EbvWorldToObject: return "WorldToObjectNV"; case EbvBaryCoordNV: return "BaryCoordNV"; case EbvBaryCoordNoPerspNV: return "BaryCoordNoPerspNV"; diff --git a/thirdparty/glslang/glslang/Include/Common.h b/thirdparty/glslang/glslang/Include/Common.h index 2c511bc1c50..b628cdc2f0d 100644 --- a/thirdparty/glslang/glslang/Include/Common.h +++ b/thirdparty/glslang/glslang/Include/Common.h @@ -37,6 +37,17 @@ #ifndef _COMMON_INCLUDED_ #define _COMMON_INCLUDED_ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include #if defined(__ANDROID__) || (defined(_MSC_VER) && _MSC_VER < 1700) #include @@ -50,9 +61,7 @@ std::string to_string(const T& val) { } #endif -// -- GODOT start -- -#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) /* || defined MINGW_HAS_SECURE_API */ -// -- GODOT end -- +#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) || defined MINGW_HAS_SECURE_API #include #ifndef snprintf #define snprintf sprintf_s @@ -95,18 +104,6 @@ std::string to_string(const T& val) { #pragma warning(disable : 4201) // nameless union #endif -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - #include "PoolAlloc.h" // diff --git a/thirdparty/glslang/glslang/Include/ConstantUnion.h b/thirdparty/glslang/glslang/Include/ConstantUnion.h index 76b2d9c08b7..c4ffb85771b 100644 --- a/thirdparty/glslang/glslang/Include/ConstantUnion.h +++ b/thirdparty/glslang/glslang/Include/ConstantUnion.h @@ -921,7 +921,7 @@ public: else unionArray = new TConstUnionVector(size); } - TConstUnionArray(const TConstUnionArray& a) : unionArray(a.unionArray) { } + TConstUnionArray(const TConstUnionArray& a) = default; TConstUnionArray(const TConstUnionArray& a, int start, int 
size) { unionArray = new TConstUnionVector(size); diff --git a/thirdparty/glslang/glslang/Include/ResourceLimits.h b/thirdparty/glslang/glslang/Include/ResourceLimits.h index 106b21d9ca8..b670cf163f2 100644 --- a/thirdparty/glslang/glslang/Include/ResourceLimits.h +++ b/thirdparty/glslang/glslang/Include/ResourceLimits.h @@ -142,6 +142,7 @@ struct TBuiltInResource { int maxTaskWorkGroupSizeY_NV; int maxTaskWorkGroupSizeZ_NV; int maxMeshViewCountNV; + int maxDualSourceDrawBuffersEXT; TLimits limits; }; diff --git a/thirdparty/glslang/glslang/Include/Types.h b/thirdparty/glslang/glslang/Include/Types.h index 3572099e3d9..235ea3f1d5e 100644 --- a/thirdparty/glslang/glslang/Include/Types.h +++ b/thirdparty/glslang/glslang/Include/Types.h @@ -3,6 +3,7 @@ // Copyright (C) 2012-2016 LunarG, Inc. // Copyright (C) 2015-2016 Google, Inc. // Copyright (C) 2017 ARM Limited. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. // @@ -472,6 +473,18 @@ enum TInterlockOrdering { EioCount, }; +enum TShaderInterface +{ + // Includes both uniform blocks and buffer blocks + EsiUniform = 0, + EsiInput, + EsiOutput, + EsiNone, + + EsiCount +}; + + class TQualifier { public: static const int layoutNotSet = -1; @@ -532,6 +545,7 @@ public: queuefamilycoherent = false; workgroupcoherent = false; subgroupcoherent = false; + shadercallcoherent = false; nonprivate = false; volatil = false; restrict = false; @@ -553,6 +567,7 @@ public: // having a constant_id is not sufficient: expressions have no id, but are still specConstant bool specConstant : 1; bool nonUniform : 1; + bool explicitOffset : 1; #ifdef GLSLANG_WEB bool isWriteOnly() const { return false; } @@ -590,6 +605,7 @@ public: bool queuefamilycoherent : 1; bool workgroupcoherent : 1; bool subgroupcoherent : 1; + bool shadercallcoherent : 1; bool nonprivate : 1; bool isWriteOnly() const { return writeonly; } bool isReadOnly() const { return readonly; } @@ -599,11 +615,11 @@ public: bool isSample() const { return sample; } bool isMemory() const { - return subgroupcoherent || workgroupcoherent || queuefamilycoherent || devicecoherent || coherent || volatil || restrict || readonly || writeonly || nonprivate; + return shadercallcoherent || subgroupcoherent || workgroupcoherent || queuefamilycoherent || devicecoherent || coherent || volatil || restrict || readonly || writeonly || nonprivate; } bool isMemoryQualifierImageAndSSBOOnly() const { - return subgroupcoherent || workgroupcoherent || queuefamilycoherent || devicecoherent || coherent || volatil || restrict || readonly || writeonly; + return shadercallcoherent || subgroupcoherent || workgroupcoherent || queuefamilycoherent || devicecoherent || coherent || volatil || restrict || readonly || writeonly; } bool bufferReferenceNeedsVulkanMemoryModel() const { @@ -773,7 +789,7 @@ public: layoutViewportRelative = false; // -2048 as the default value indicating layoutSecondaryViewportRelative is not set layoutSecondaryViewportRelativeOffset = -2048; - layoutShaderRecordNV = false; + layoutShaderRecord = false; layoutBufferReferenceAlign = layoutBufferReferenceAlignEnd; layoutFormat = ElfNone; #endif @@ -812,7 +828,7 @@ public: hasAnyLocation() || hasStream() || hasFormat() || - isShaderRecordNV() || + isShaderRecord() || isPushConstant() || hasBufferReference(); } @@ -871,7 +887,7 @@ public: bool layoutPassthrough; bool layoutViewportRelative; int layoutSecondaryViewportRelativeOffset; - bool layoutShaderRecordNV; + bool layoutShaderRecord; #endif bool 
hasUniformLayout() const @@ -942,7 +958,7 @@ public: bool hasAttachment() const { return false; } TLayoutFormat getFormat() const { return ElfNone; } bool isPushConstant() const { return false; } - bool isShaderRecordNV() const { return false; } + bool isShaderRecord() const { return false; } bool hasBufferReference() const { return false; } bool hasBufferReferenceAlign() const { return false; } bool isNonUniform() const { return false; } @@ -993,7 +1009,7 @@ public: } TLayoutFormat getFormat() const { return layoutFormat; } bool isPushConstant() const { return layoutPushConstant; } - bool isShaderRecordNV() const { return layoutShaderRecordNV; } + bool isShaderRecord() const { return layoutShaderRecord; } bool hasBufferReference() const { return layoutBufferReference; } bool hasBufferReferenceAlign() const { @@ -1219,6 +1235,7 @@ struct TShaderQualifiers { bool layoutDerivativeGroupQuads; // true if layout derivative_group_quadsNV set bool layoutDerivativeGroupLinear; // true if layout derivative_group_linearNV set int primitives; // mesh shader "max_primitives" + bool layoutPrimitiveCulling; // true if layout primitive_culling set TLayoutDepth getDepth() const { return layoutDepth; } #else TLayoutDepth getDepth() const { return EldNone; } @@ -1252,6 +1269,7 @@ struct TShaderQualifiers { layoutOverrideCoverage = false; layoutDerivativeGroupQuads = false; layoutDerivativeGroupLinear = false; + layoutPrimitiveCulling = false; primitives = TQualifier::layoutNotSet; interlockOrdering = EioNone; #endif @@ -1315,6 +1333,8 @@ struct TShaderQualifiers { primitives = src.primitives; if (src.interlockOrdering != EioNone) interlockOrdering = src.interlockOrdering; + if (src.layoutPrimitiveCulling) + layoutPrimitiveCulling = src.layoutPrimitiveCulling; #endif } }; @@ -1612,6 +1632,23 @@ public: assert(fieldName); return *fieldName; } + TShaderInterface getShaderInterface() const + { + if (basicType != EbtBlock) + return EsiNone; + + switch (qualifier.storage) { + default: + return EsiNone; + case EvqVaryingIn: + return EsiInput; + case EvqVaryingOut: + return EsiOutput; + case EvqUniform: + case EvqBuffer: + return EsiUniform; + } + } virtual TBasicType getBasicType() const { return basicType; } virtual const TSampler& getSampler() const { return sampler; } @@ -1670,7 +1707,7 @@ public: } virtual bool isOpaque() const { return basicType == EbtSampler #ifndef GLSLANG_WEB - || basicType == EbtAtomicUint || basicType == EbtAccStructNV + || basicType == EbtAtomicUint || basicType == EbtAccStruct || basicType == EbtRayQuery #endif ; } virtual bool isBuiltIn() const { return getQualifier().builtIn != EbvNone; } @@ -1946,7 +1983,8 @@ public: case EbtAtomicUint: return "atomic_uint"; case EbtStruct: return "structure"; case EbtBlock: return "block"; - case EbtAccStructNV: return "accelerationStructureNV"; + case EbtAccStruct: return "accelerationStructureNV"; + case EbtRayQuery: return "rayQueryEXT"; case EbtReference: return "reference"; #endif default: return "unknown type"; @@ -2056,7 +2094,7 @@ public: appendStr(" layoutSecondaryViewportRelativeOffset="); appendInt(qualifier.layoutSecondaryViewportRelativeOffset); } - if (qualifier.layoutShaderRecordNV) + if (qualifier.layoutShaderRecord) appendStr(" shaderRecordNV"); appendStr(")"); @@ -2099,6 +2137,8 @@ public: appendStr(" workgroupcoherent"); if (qualifier.subgroupcoherent) appendStr(" subgroupcoherent"); + if (qualifier.shadercallcoherent) + appendStr(" shadercallcoherent"); if 
(qualifier.nonprivate) appendStr(" nonprivate"); if (qualifier.volatil) diff --git a/thirdparty/glslang/glslang/Include/glslang_c_interface.h b/thirdparty/glslang/glslang/Include/glslang_c_interface.h new file mode 100644 index 00000000000..4b32e2b85fe --- /dev/null +++ b/thirdparty/glslang/glslang/Include/glslang_c_interface.h @@ -0,0 +1,249 @@ +/** + This code is based on the glslang_c_interface implementation by Viktor Latypov +**/ + +/** +BSD 2-Clause License + +Copyright (c) 2019, Viktor Latypov +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
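The header that follows declares glslang_limits_t and glslang_resource_t as field-for-field C mirrors of TLimits and TBuiltInResource; the sizeof() static_asserts in glslang_c_interface.cpp above are what keep that mirroring honest. A hedged sketch of the shortcut this enables for C-interface callers that want the bundled defaults (this cast is a common embedder convenience, not something the patch itself adds):

#include "StandAlone/ResourceLimits.h"
#include "glslang/Include/glslang_c_interface.h"

// Valid only while the two structs stay layout-compatible, which is exactly
// what the sizeof() static_asserts in glslang_c_interface.cpp guard.
inline const glslang_resource_t* default_resource() {
    return reinterpret_cast<const glslang_resource_t*>(&glslang::DefaultTBuiltInResource);
}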
+**/ + +#ifndef GLSLANG_C_IFACE_H_INCLUDED +#define GLSLANG_C_IFACE_H_INCLUDED + +#include +#include + +#include "glslang_c_shader_types.h" + +typedef struct glslang_shader_s glslang_shader_t; +typedef struct glslang_program_s glslang_program_t; + +/* TLimits counterpart */ +typedef struct glslang_limits_s { + bool non_inductive_for_loops; + bool while_loops; + bool do_while_loops; + bool general_uniform_indexing; + bool general_attribute_matrix_vector_indexing; + bool general_varying_indexing; + bool general_sampler_indexing; + bool general_variable_indexing; + bool general_constant_matrix_vector_indexing; +} glslang_limits_t; + +/* TBuiltInResource counterpart */ +typedef struct glslang_resource_s { + int max_lights; + int max_clip_planes; + int max_texture_units; + int max_texture_coords; + int max_vertex_attribs; + int max_vertex_uniform_components; + int max_varying_floats; + int max_vertex_texture_image_units; + int max_combined_texture_image_units; + int max_texture_image_units; + int max_fragment_uniform_components; + int max_draw_buffers; + int max_vertex_uniform_vectors; + int max_varying_vectors; + int max_fragment_uniform_vectors; + int max_vertex_output_vectors; + int max_fragment_input_vectors; + int min_program_texel_offset; + int max_program_texel_offset; + int max_clip_distances; + int max_compute_work_group_count_x; + int max_compute_work_group_count_y; + int max_compute_work_group_count_z; + int max_compute_work_group_size_x; + int max_compute_work_group_size_y; + int max_compute_work_group_size_z; + int max_compute_uniform_components; + int max_compute_texture_image_units; + int max_compute_image_uniforms; + int max_compute_atomic_counters; + int max_compute_atomic_counter_buffers; + int max_varying_components; + int max_vertex_output_components; + int max_geometry_input_components; + int max_geometry_output_components; + int max_fragment_input_components; + int max_image_units; + int max_combined_image_units_and_fragment_outputs; + int max_combined_shader_output_resources; + int max_image_samples; + int max_vertex_image_uniforms; + int max_tess_control_image_uniforms; + int max_tess_evaluation_image_uniforms; + int max_geometry_image_uniforms; + int max_fragment_image_uniforms; + int max_combined_image_uniforms; + int max_geometry_texture_image_units; + int max_geometry_output_vertices; + int max_geometry_total_output_components; + int max_geometry_uniform_components; + int max_geometry_varying_components; + int max_tess_control_input_components; + int max_tess_control_output_components; + int max_tess_control_texture_image_units; + int max_tess_control_uniform_components; + int max_tess_control_total_output_components; + int max_tess_evaluation_input_components; + int max_tess_evaluation_output_components; + int max_tess_evaluation_texture_image_units; + int max_tess_evaluation_uniform_components; + int max_tess_patch_components; + int max_patch_vertices; + int max_tess_gen_level; + int max_viewports; + int max_vertex_atomic_counters; + int max_tess_control_atomic_counters; + int max_tess_evaluation_atomic_counters; + int max_geometry_atomic_counters; + int max_fragment_atomic_counters; + int max_combined_atomic_counters; + int max_atomic_counter_bindings; + int max_vertex_atomic_counter_buffers; + int max_tess_control_atomic_counter_buffers; + int max_tess_evaluation_atomic_counter_buffers; + int max_geometry_atomic_counter_buffers; + int max_fragment_atomic_counter_buffers; + int max_combined_atomic_counter_buffers; + int max_atomic_counter_buffer_size; + int 
max_transform_feedback_buffers; + int max_transform_feedback_interleaved_components; + int max_cull_distances; + int max_combined_clip_and_cull_distances; + int max_samples; + int max_mesh_output_vertices_nv; + int max_mesh_output_primitives_nv; + int max_mesh_work_group_size_x_nv; + int max_mesh_work_group_size_y_nv; + int max_mesh_work_group_size_z_nv; + int max_task_work_group_size_x_nv; + int max_task_work_group_size_y_nv; + int max_task_work_group_size_z_nv; + int max_mesh_view_count_nv; + int maxDualSourceDrawBuffersEXT; + + glslang_limits_t limits; +} glslang_resource_t; + +typedef struct glslang_input_s { + glslang_source_t language; + glslang_stage_t stage; + glslang_client_t client; + glslang_target_client_version_t client_version; + glslang_target_language_t target_language; + glslang_target_language_version_t target_language_version; + /** Shader source code */ + const char* code; + int default_version; + glslang_profile_t default_profile; + int force_default_version_and_profile; + int forward_compatible; + glslang_messages_t messages; + const glslang_resource_t* resource; +} glslang_input_t; + +/* Inclusion result structure allocated by C include_local/include_system callbacks */ +typedef struct glsl_include_result_s { + /* Header file name or NULL if inclusion failed */ + const char* header_name; + + /* Header contents or NULL */ + const char* header_data; + size_t header_length; + +} glsl_include_result_t; + +/* Callback for local file inclusion */ +typedef glsl_include_result_t* (*glsl_include_local_func)(void* ctx, const char* header_name, const char* includer_name, + size_t include_depth); + +/* Callback for system file inclusion */ +typedef glsl_include_result_t* (*glsl_include_system_func)(void* ctx, const char* header_name, + const char* includer_name, size_t include_depth); + +/* Callback for include result destruction */ +typedef int (*glsl_free_include_result_func)(void* ctx, glsl_include_result_t* result); + +/* Collection of callbacks for GLSL preprocessor */ +typedef struct glsl_include_callbacks_s { + glsl_include_system_func include_system; + glsl_include_local_func include_local; + glsl_free_include_result_func free_include_result; +} glsl_include_callbacks_t; + +#ifdef __cplusplus +extern "C" { +#endif + +#ifdef GLSLANG_IS_SHARED_LIBRARY + #ifdef _WIN32 + #ifdef GLSLANG_EXPORTING + #define GLSLANG_EXPORT __declspec(dllexport) + #else + #define GLSLANG_EXPORT __declspec(dllimport) + #endif + #elif __GNUC__ >= 4 + #define GLSLANG_EXPORT __attribute__((visibility("default"))) + #endif +#endif // GLSLANG_IS_SHARED_LIBRARY + +#ifndef GLSLANG_EXPORT +#define GLSLANG_EXPORT +#endif + +GLSLANG_EXPORT int glslang_initialize_process(); +GLSLANG_EXPORT void glslang_finalize_process(); + +GLSLANG_EXPORT glslang_shader_t* glslang_shader_create(const glslang_input_t* input); +GLSLANG_EXPORT void glslang_shader_delete(glslang_shader_t* shader); +GLSLANG_EXPORT int glslang_shader_preprocess(glslang_shader_t* shader, const glslang_input_t* input); +GLSLANG_EXPORT int glslang_shader_parse(glslang_shader_t* shader, const glslang_input_t* input); +GLSLANG_EXPORT const char* glslang_shader_get_preprocessed_code(glslang_shader_t* shader); +GLSLANG_EXPORT const char* glslang_shader_get_info_log(glslang_shader_t* shader); +GLSLANG_EXPORT const char* glslang_shader_get_info_debug_log(glslang_shader_t* shader); + +GLSLANG_EXPORT glslang_program_t* glslang_program_create(); +GLSLANG_EXPORT void glslang_program_delete(glslang_program_t* program); +GLSLANG_EXPORT void 
glslang_program_add_shader(glslang_program_t* program, glslang_shader_t* shader); +GLSLANG_EXPORT int glslang_program_link(glslang_program_t* program, int messages); // glslang_messages_t +GLSLANG_EXPORT void glslang_program_SPIRV_generate(glslang_program_t* program, glslang_stage_t stage); +GLSLANG_EXPORT size_t glslang_program_SPIRV_get_size(glslang_program_t* program); +GLSLANG_EXPORT void glslang_program_SPIRV_get(glslang_program_t* program, unsigned int*); +GLSLANG_EXPORT unsigned int* glslang_program_SPIRV_get_ptr(glslang_program_t* program); +GLSLANG_EXPORT const char* glslang_program_SPIRV_get_messages(glslang_program_t* program); +GLSLANG_EXPORT const char* glslang_program_get_info_log(glslang_program_t* program); +GLSLANG_EXPORT const char* glslang_program_get_info_debug_log(glslang_program_t* program); + +#ifdef __cplusplus +} +#endif + +#endif /* #ifdef GLSLANG_C_IFACE_INCLUDED */ diff --git a/thirdparty/glslang/glslang/Include/glslang_c_shader_types.h b/thirdparty/glslang/glslang/Include/glslang_c_shader_types.h new file mode 100644 index 00000000000..d01a115f696 --- /dev/null +++ b/thirdparty/glslang/glslang/Include/glslang_c_shader_types.h @@ -0,0 +1,185 @@ +/** + This code is based on the glslang_c_interface implementation by Viktor Latypov +**/ + +/** +BSD 2-Clause License + +Copyright (c) 2019, Viktor Latypov +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
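With the C API above in place, a complete compile goes create, preprocess, parse, link, then SPIR-V retrieval, mirroring the C++ TShader/TProgram flow. A minimal caller sketch, assuming a fully populated glslang_input_t; error handling is abbreviated and nothing here is code from this patch:

#include <cstdio>
#include <vector>
#include "glslang/Include/glslang_c_interface.h"

// Compile one shader stage described by 'input' into SPIR-V words.
bool compile_to_spirv(const glslang_input_t* input, std::vector<unsigned int>& out_spirv) {
    glslang_initialize_process();
    glslang_shader_t* shader = glslang_shader_create(input);
    if (!shader)
        return false;
    if (!glslang_shader_preprocess(shader, input) || !glslang_shader_parse(shader, input)) {
        printf("%s\n", glslang_shader_get_info_log(shader));
        glslang_shader_delete(shader);
        return false;
    }
    glslang_program_t* program = glslang_program_create();
    glslang_program_add_shader(program, shader);
    if (!glslang_program_link(program, GLSLANG_MSG_SPV_RULES_BIT | GLSLANG_MSG_VULKAN_RULES_BIT)) {
        printf("%s\n", glslang_program_get_info_log(program));
        glslang_program_delete(program);
        glslang_shader_delete(shader);
        return false;
    }
    glslang_program_SPIRV_generate(program, input->stage);
    out_spirv.resize(glslang_program_SPIRV_get_size(program));
    glslang_program_SPIRV_get(program, out_spirv.data());
    glslang_program_delete(program);
    glslang_shader_delete(shader);
    glslang_finalize_process();
    return true;
}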
+**/ + +#ifndef C_SHADER_TYPES_H_INCLUDED +#define C_SHADER_TYPES_H_INCLUDED + +#define LAST_ELEMENT_MARKER(x) x + +/* EShLanguage counterpart */ +typedef enum { + GLSLANG_STAGE_VERTEX, + GLSLANG_STAGE_TESSCONTROL, + GLSLANG_STAGE_TESSEVALUATION, + GLSLANG_STAGE_GEOMETRY, + GLSLANG_STAGE_FRAGMENT, + GLSLANG_STAGE_COMPUTE, + GLSLANG_STAGE_RAYGEN_NV, + GLSLANG_STAGE_INTERSECT_NV, + GLSLANG_STAGE_ANYHIT_NV, + GLSLANG_STAGE_CLOSESTHIT_NV, + GLSLANG_STAGE_MISS_NV, + GLSLANG_STAGE_CALLABLE_NV, + GLSLANG_STAGE_TASK_NV, + GLSLANG_STAGE_MESH_NV, + LAST_ELEMENT_MARKER(GLSLANG_STAGE_COUNT), +} glslang_stage_t; // would be better as stage, but this is ancient now + +/* EShLanguageMask counterpart */ +typedef enum { + GLSLANG_STAGE_VERTEX_MASK = (1 << GLSLANG_STAGE_VERTEX), + GLSLANG_STAGE_TESSCONTROL_MASK = (1 << GLSLANG_STAGE_TESSCONTROL), + GLSLANG_STAGE_TESSEVALUATION_MASK = (1 << GLSLANG_STAGE_TESSEVALUATION), + GLSLANG_STAGE_GEOMETRY_MASK = (1 << GLSLANG_STAGE_GEOMETRY), + GLSLANG_STAGE_FRAGMENT_MASK = (1 << GLSLANG_STAGE_FRAGMENT), + GLSLANG_STAGE_COMPUTE_MASK = (1 << GLSLANG_STAGE_COMPUTE), + GLSLANG_STAGE_RAYGEN_NV_MASK = (1 << GLSLANG_STAGE_RAYGEN_NV), + GLSLANG_STAGE_INTERSECT_NV_MASK = (1 << GLSLANG_STAGE_INTERSECT_NV), + GLSLANG_STAGE_ANYHIT_NV_MASK = (1 << GLSLANG_STAGE_ANYHIT_NV), + GLSLANG_STAGE_CLOSESTHIT_NV_MASK = (1 << GLSLANG_STAGE_CLOSESTHIT_NV), + GLSLANG_STAGE_MISS_NV_MASK = (1 << GLSLANG_STAGE_MISS_NV), + GLSLANG_STAGE_CALLABLE_NV_MASK = (1 << GLSLANG_STAGE_CALLABLE_NV), + GLSLANG_STAGE_TASK_NV_MASK = (1 << GLSLANG_STAGE_TASK_NV), + GLSLANG_STAGE_MESH_NV_MASK = (1 << GLSLANG_STAGE_MESH_NV), + LAST_ELEMENT_MARKER(GLSLANG_STAGE_MASK_COUNT), +} glslang_stage_mask_t; + +/* EShSource counterpart */ +typedef enum { + GLSLANG_SOURCE_NONE, + GLSLANG_SOURCE_GLSL, + GLSLANG_SOURCE_HLSL, + LAST_ELEMENT_MARKER(GLSLANG_SOURCE_COUNT), +} glslang_source_t; + +/* EShClient counterpart */ +typedef enum { + GLSLANG_CLIENT_NONE, + GLSLANG_CLIENT_VULKAN, + GLSLANG_CLIENT_OPENGL, + LAST_ELEMENT_MARKER(GLSLANG_CLIENT_COUNT), +} glslang_client_t; + +/* EShTargetLanguage counterpart */ +typedef enum { + GLSLANG_TARGET_NONE, + GLSLANG_TARGET_SPV, + LAST_ELEMENT_MARKER(GLSLANG_TARGET_COUNT), +} glslang_target_language_t; + +/* SH_TARGET_ClientVersion counterpart */ +typedef enum { + GLSLANG_TARGET_VULKAN_1_0 = (1 << 22), + GLSLANG_TARGET_VULKAN_1_1 = (1 << 22) | (1 << 12), + GLSLANG_TARGET_OPENGL_450 = 450, + LAST_ELEMENT_MARKER(GLSLANG_TARGET_CLIENT_VERSION_COUNT), +} glslang_target_client_version_t; + +/* SH_TARGET_LanguageVersion counterpart */ +typedef enum { + GLSLANG_TARGET_SPV_1_0 = (1 << 16), + GLSLANG_TARGET_SPV_1_1 = (1 << 16) | (1 << 8), + GLSLANG_TARGET_SPV_1_2 = (1 << 16) | (2 << 8), + GLSLANG_TARGET_SPV_1_3 = (1 << 16) | (3 << 8), + GLSLANG_TARGET_SPV_1_4 = (1 << 16) | (4 << 8), + GLSLANG_TARGET_SPV_1_5 = (1 << 16) | (5 << 8), + LAST_ELEMENT_MARKER(GLSLANG_TARGET_LANGUAGE_VERSION_COUNT), +} glslang_target_language_version_t; + +/* EShExecutable counterpart */ +typedef enum { GLSLANG_EX_VERTEX_FRAGMENT, GLSLANG_EX_FRAGMENT } glslang_executable_t; + +/* EShOptimizationLevel counterpart */ +typedef enum { + GLSLANG_OPT_NO_GENERATION, + GLSLANG_OPT_NONE, + GLSLANG_OPT_SIMPLE, + GLSLANG_OPT_FULL, + LAST_ELEMENT_MARKER(GLSLANG_OPT_LEVEL_COUNT), +} glslang_optimization_level_t; + +/* EShTextureSamplerTransformMode counterpart */ +typedef enum { + GLSLANG_TEX_SAMP_TRANS_KEEP, + GLSLANG_TEX_SAMP_TRANS_UPGRADE_TEXTURE_REMOVE_SAMPLER, + LAST_ELEMENT_MARKER(GLSLANG_TEX_SAMP_TRANS_COUNT), +} 
glslang_texture_sampler_transform_mode_t; + +/* EShMessages counterpart */ +typedef enum { + GLSLANG_MSG_DEFAULT_BIT = 0, + GLSLANG_MSG_RELAXED_ERRORS_BIT = (1 << 0), + GLSLANG_MSG_SUPPRESS_WARNINGS_BIT = (1 << 1), + GLSLANG_MSG_AST_BIT = (1 << 2), + GLSLANG_MSG_SPV_RULES_BIT = (1 << 3), + GLSLANG_MSG_VULKAN_RULES_BIT = (1 << 4), + GLSLANG_MSG_ONLY_PREPROCESSOR_BIT = (1 << 5), + GLSLANG_MSG_READ_HLSL_BIT = (1 << 6), + GLSLANG_MSG_CASCADING_ERRORS_BIT = (1 << 7), + GLSLANG_MSG_KEEP_UNCALLED_BIT = (1 << 8), + GLSLANG_MSG_HLSL_OFFSETS_BIT = (1 << 9), + GLSLANG_MSG_DEBUG_INFO_BIT = (1 << 10), + GLSLANG_MSG_HLSL_ENABLE_16BIT_TYPES_BIT = (1 << 11), + GLSLANG_MSG_HLSL_LEGALIZATION_BIT = (1 << 12), + GLSLANG_MSG_HLSL_DX9_COMPATIBLE_BIT = (1 << 13), + GLSLANG_MSG_BUILTIN_SYMBOL_TABLE_BIT = (1 << 14), + LAST_ELEMENT_MARKER(GLSLANG_MSG_COUNT), +} glslang_messages_t; + +/* EShReflectionOptions counterpart */ +typedef enum { + GLSLANG_REFLECTION_DEFAULT_BIT = 0, + GLSLANG_REFLECTION_STRICT_ARRAY_SUFFIX_BIT = (1 << 0), + GLSLANG_REFLECTION_BASIC_ARRAY_SUFFIX_BIT = (1 << 1), + GLSLANG_REFLECTION_INTERMEDIATE_IOO_BIT = (1 << 2), + GLSLANG_REFLECTION_SEPARATE_BUFFERS_BIT = (1 << 3), + GLSLANG_REFLECTION_ALL_BLOCK_VARIABLES_BIT = (1 << 4), + GLSLANG_REFLECTION_UNWRAP_IO_BLOCKS_BIT = (1 << 5), + GLSLANG_REFLECTION_ALL_IO_VARIABLES_BIT = (1 << 6), + GLSLANG_REFLECTION_SHARED_STD140_SSBO_BIT = (1 << 7), + GLSLANG_REFLECTION_SHARED_STD140_UBO_BIT = (1 << 8), + LAST_ELEMENT_MARKER(GLSLANG_REFLECTION_COUNT), +} glslang_reflection_options_t; + +/* EProfile counterpart (from Versions.h) */ +typedef enum { + GLSLANG_BAD_PROFILE = 0, + GLSLANG_NO_PROFILE = (1 << 0), + GLSLANG_CORE_PROFILE = (1 << 1), + GLSLANG_COMPATIBILITY_PROFILE = (1 << 2), + GLSLANG_ES_PROFILE = (1 << 3), + LAST_ELEMENT_MARKER(GLSLANG_PROFILE_COUNT), +} glslang_profile_t; + +#undef LAST_ELEMENT_MARKER + +#endif diff --git a/thirdparty/glslang/glslang/Include/intermediate.h b/thirdparty/glslang/glslang/Include/intermediate.h index 29d58ca635e..30cb6fb1713 100644 --- a/thirdparty/glslang/glslang/Include/intermediate.h +++ b/thirdparty/glslang/glslang/Include/intermediate.h @@ -2,6 +2,7 @@ // Copyright (C) 2002-2005 3Dlabs Inc. Ltd. // Copyright (C) 2012-2016 LunarG, Inc. // Copyright (C) 2017 ARM Limited. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. 
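For orientation, a minimal sketch (not part of the patch) of how the glslang C interface declared above might be driven end to end: build a glslang_input_t, create and parse a shader, link it into a program, and emit SPIR-V. The include path and the zero-initialized resource limits are assumptions for illustration; a real embedder fills glslang_resource_t with its own limits and checks every return value.

    #include <stdio.h>
    #include "glslang/Include/glslang_c_interface.h" /* assumed include path */

    /* Resource limits are zeroed here for brevity; real callers fill them in. */
    static const glslang_resource_t default_resource;

    int compile_vertex_shader(const char* source) {
        glslang_input_t input = {0};
        input.language                = GLSLANG_SOURCE_GLSL;
        input.stage                   = GLSLANG_STAGE_VERTEX;
        input.client                  = GLSLANG_CLIENT_VULKAN;
        input.client_version          = GLSLANG_TARGET_VULKAN_1_0;
        input.target_language         = GLSLANG_TARGET_SPV;
        input.target_language_version = GLSLANG_TARGET_SPV_1_0;
        input.code                    = source;
        input.default_version         = 100;
        input.default_profile         = GLSLANG_NO_PROFILE;
        input.messages                = GLSLANG_MSG_DEFAULT_BIT;
        input.resource                = &default_resource;

        glslang_initialize_process();

        glslang_shader_t* shader = glslang_shader_create(&input);
        if (!glslang_shader_preprocess(shader, &input) ||
            !glslang_shader_parse(shader, &input)) {
            fprintf(stderr, "%s\n", glslang_shader_get_info_log(shader));
            glslang_shader_delete(shader);
            glslang_finalize_process();
            return -1;
        }

        glslang_program_t* program = glslang_program_create();
        glslang_program_add_shader(program, shader);

        if (!glslang_program_link(program, GLSLANG_MSG_SPV_RULES_BIT | GLSLANG_MSG_VULKAN_RULES_BIT)) {
            fprintf(stderr, "%s\n", glslang_program_get_info_log(program));
        } else {
            glslang_program_SPIRV_generate(program, input.stage);
            printf("generated %u SPIR-V words\n", (unsigned)glslang_program_SPIRV_get_size(program));
        }

        glslang_program_delete(program);
        glslang_shader_delete(shader);
        glslang_finalize_process();
        return 0;
    }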
// @@ -621,6 +622,8 @@ enum TOperator { EOpIsHelperInvocation, + EOpDebugPrintf, + // // Branch // @@ -908,12 +911,41 @@ enum TOperator { EOpAverageRounded, EOpMul32x16, - EOpTraceNV, - EOpReportIntersectionNV, - EOpIgnoreIntersectionNV, - EOpTerminateRayNV, - EOpExecuteCallableNV, + EOpTrace, + EOpReportIntersection, + EOpIgnoreIntersection, + EOpTerminateRay, + EOpExecuteCallable, EOpWritePackedPrimitiveIndices4x8NV, + + // + // GL_EXT_ray_query operations + // + + EOpRayQueryInitialize, + EOpRayQueryTerminate, + EOpRayQueryGenerateIntersection, + EOpRayQueryConfirmIntersection, + EOpRayQueryProceed, + EOpRayQueryGetIntersectionType, + EOpRayQueryGetRayTMin, + EOpRayQueryGetRayFlags, + EOpRayQueryGetIntersectionT, + EOpRayQueryGetIntersectionInstanceCustomIndex, + EOpRayQueryGetIntersectionInstanceId, + EOpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffset, + EOpRayQueryGetIntersectionGeometryIndex, + EOpRayQueryGetIntersectionPrimitiveIndex, + EOpRayQueryGetIntersectionBarycentrics, + EOpRayQueryGetIntersectionFrontFace, + EOpRayQueryGetIntersectionCandidateAABBOpaque, + EOpRayQueryGetIntersectionObjectRayDirection, + EOpRayQueryGetIntersectionObjectRayOrigin, + EOpRayQueryGetWorldRayDirection, + EOpRayQueryGetWorldRayOrigin, + EOpRayQueryGetIntersectionObjectToWorld, + EOpRayQueryGetIntersectionWorldToObject, + // // HLSL operations // @@ -1199,6 +1231,7 @@ public: TOperator getFlowOp() const { return flowOp; } TIntermTyped* getExpression() const { return expression; } void setExpression(TIntermTyped* pExpression) { expression = pExpression; } + void updatePrecision(TPrecisionQualifier parentPrecision); protected: TOperator flowOp; TIntermTyped* expression; diff --git a/thirdparty/glslang/glslang/Include/revision.h b/thirdparty/glslang/glslang/Include/revision.h deleted file mode 100644 index a0e4b2066cb..00000000000 --- a/thirdparty/glslang/glslang/Include/revision.h +++ /dev/null @@ -1,3 +0,0 @@ -// This header is generated by the make-revision script. - -#define GLSLANG_PATCH_LEVEL 3559 diff --git a/thirdparty/glslang/glslang/MachineIndependent/Constant.cpp b/thirdparty/glslang/glslang/MachineIndependent/Constant.cpp index 98c2666fbb7..e21cf427f06 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/Constant.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/Constant.cpp @@ -2,7 +2,7 @@ // Copyright (C) 2002-2005 3Dlabs Inc. Ltd. // Copyright (C) 2012-2013 LunarG, Inc. // Copyright (C) 2017 ARM Limited. -// Copyright (C) 2018 Google, Inc. +// Copyright (C) 2018-2020 Google, Inc. // // All rights reserved. 
// @@ -1012,6 +1012,7 @@ TIntermTyped* TIntermediate::fold(TIntermAggregate* aggrNode) case EOpMin: case EOpMax: case EOpMix: + case EOpMod: case EOpClamp: case EOpLessThan: case EOpGreaterThan: @@ -1074,6 +1075,14 @@ TIntermTyped* TIntermediate::fold(TIntermAggregate* aggrNode) case EOpPow: newConstArray[comp].setDConst(pow(childConstUnions[0][arg0comp].getDConst(), childConstUnions[1][arg1comp].getDConst())); break; + case EOpMod: + { + double arg0 = childConstUnions[0][arg0comp].getDConst(); + double arg1 = childConstUnions[1][arg1comp].getDConst(); + double result = arg0 - arg1 * floor(arg0 / arg1); + newConstArray[comp].setDConst(result); + break; + } case EOpMin: switch(children[0]->getAsTyped()->getBasicType()) { case EbtFloat16: diff --git a/thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp b/thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp index de55742649a..8d5d04fb439 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp @@ -1,8 +1,9 @@ // // Copyright (C) 2002-2005 3Dlabs Inc. Ltd. // Copyright (C) 2012-2016 LunarG, Inc. -// Copyright (C) 2015-2018 Google, Inc. +// Copyright (C) 2015-2020 Google, Inc. // Copyright (C) 2017 ARM Limited. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. // @@ -125,8 +126,6 @@ enum ArgClass { }; // Mixtures of the above, to help the function tables const ArgClass ClassV1FIOCV = (ArgClass)(ClassV1 | ClassFIO | ClassCV); -const ArgClass ClassV1FOCV = (ArgClass)(ClassV1 | ClassFO | ClassCV); -const ArgClass ClassV1CV = (ArgClass)(ClassV1 | ClassCV); const ArgClass ClassBNS = (ArgClass)(ClassB | ClassNS); const ArgClass ClassRSNS = (ArgClass)(ClassRS | ClassNS); @@ -147,17 +146,21 @@ EProfile EDesktopProfile = static_cast(ENoProfile | ECoreProfile | ECo // Declare pointers to put into the table for versioning. 
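Side note on the EOpMod constant-folding case added to Constant.cpp above: the folded expression matches the GLSL definition mod(x, y) = x - y * floor(x / y), which differs from C's fmod() for negative operands. A quick stand-alone check, illustrative only:

    #include <math.h>
    #include <stdio.h>

    /* GLSL-style mod(), the same formula used by the new EOpMod folding case. */
    static double glsl_mod(double x, double y) { return x - y * floor(x / y); }

    int main(void) {
        printf("%g\n", glsl_mod(-1.5, 2.0)); /* prints 0.5; fmod(-1.5, 2.0) would give -1.5 */
        return 0;
    }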
#ifdef GLSLANG_WEB const Versioning* Es300Desktop130 = nullptr; - const Versioning* Es310Desktop430 = nullptr; + const Versioning* Es310Desktop420 = nullptr; +#elif defined(GLSLANG_ANGLE) + const Versioning* Es300Desktop130 = nullptr; + const Versioning* Es310Desktop420 = nullptr; + const Versioning* Es310Desktop450 = nullptr; #else const Versioning Es300Desktop130Version[] = { { EEsProfile, 0, 300, 0, nullptr }, { EDesktopProfile, 0, 130, 0, nullptr }, { EBadProfile } }; const Versioning* Es300Desktop130 = &Es300Desktop130Version[0]; - const Versioning Es310Desktop430Version[] = { { EEsProfile, 0, 310, 0, nullptr }, - { EDesktopProfile, 0, 430, 0, nullptr }, + const Versioning Es310Desktop420Version[] = { { EEsProfile, 0, 310, 0, nullptr }, + { EDesktopProfile, 0, 420, 0, nullptr }, { EBadProfile } }; - const Versioning* Es310Desktop430 = &Es310Desktop430Version[0]; + const Versioning* Es310Desktop420 = &Es310Desktop420Version[0]; const Versioning Es310Desktop450Version[] = { { EEsProfile, 0, 310, 0, nullptr }, { EDesktopProfile, 0, 450, 0, nullptr }, @@ -257,14 +260,14 @@ const BuiltInFunction BaseFunctions[] = { { EOpGreaterThanEqual, "greaterThanEqual", 2, TypeU, ClassBNS, Es300Desktop130 }, { EOpVectorEqual, "equal", 2, TypeU, ClassBNS, Es300Desktop130 }, { EOpVectorNotEqual, "notEqual", 2, TypeU, ClassBNS, Es300Desktop130 }, - { EOpAtomicAdd, "atomicAdd", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 }, - { EOpAtomicMin, "atomicMin", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 }, - { EOpAtomicMax, "atomicMax", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 }, - { EOpAtomicAnd, "atomicAnd", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 }, - { EOpAtomicOr, "atomicOr", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 }, - { EOpAtomicXor, "atomicXor", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 }, - { EOpAtomicExchange, "atomicExchange", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 }, - { EOpAtomicCompSwap, "atomicCompSwap", 3, TypeIU, ClassV1FIOCV, Es310Desktop430 }, + { EOpAtomicAdd, "atomicAdd", 2, TypeIU, ClassV1FIOCV, Es310Desktop420 }, + { EOpAtomicMin, "atomicMin", 2, TypeIU, ClassV1FIOCV, Es310Desktop420 }, + { EOpAtomicMax, "atomicMax", 2, TypeIU, ClassV1FIOCV, Es310Desktop420 }, + { EOpAtomicAnd, "atomicAnd", 2, TypeIU, ClassV1FIOCV, Es310Desktop420 }, + { EOpAtomicOr, "atomicOr", 2, TypeIU, ClassV1FIOCV, Es310Desktop420 }, + { EOpAtomicXor, "atomicXor", 2, TypeIU, ClassV1FIOCV, Es310Desktop420 }, + { EOpAtomicExchange, "atomicExchange", 2, TypeIU, ClassV1FIOCV, Es310Desktop420 }, + { EOpAtomicCompSwap, "atomicCompSwap", 3, TypeIU, ClassV1FIOCV, Es310Desktop420 }, #ifndef GLSLANG_WEB { EOpMix, "mix", 3, TypeB, ClassRegular, Es310Desktop450 }, { EOpMix, "mix", 3, TypeIU, ClassLB, Es310Desktop450 }, @@ -416,7 +419,7 @@ void AddTabledBuiltin(TString& decls, const BuiltInFunction& function) // See if the tabled versioning information allows the current version. 
bool ValidVersion(const BuiltInFunction& function, int version, EProfile profile, const SpvVersion& /* spVersion */) { -#ifdef GLSLANG_WEB +#if defined(GLSLANG_WEB) || defined(GLSLANG_ANGLE) // all entries in table are valid return true; #endif @@ -500,7 +503,7 @@ TBuiltIns::TBuiltIns() prefixes[EbtFloat] = ""; prefixes[EbtInt] = "i"; prefixes[EbtUint] = "u"; -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) prefixes[EbtFloat16] = "f16"; prefixes[EbtInt8] = "i8"; prefixes[EbtUint8] = "u8"; @@ -517,7 +520,9 @@ TBuiltIns::TBuiltIns() dimMap[Esd3D] = 3; dimMap[EsdCube] = 3; #ifndef GLSLANG_WEB +#ifndef GLSLANG_ANGLE dimMap[Esd1D] = 1; +#endif dimMap[EsdRect] = 2; dimMap[EsdBuffer] = 1; dimMap[EsdSubpass] = 2; // potentially unused for now @@ -542,6 +547,9 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV #ifdef GLSLANG_WEB version = 310; profile = EEsProfile; +#elif defined(GLSLANG_ANGLE) + version = 450; + profile = ECoreProfile; #endif addTabledBuiltins(version, profile, spvVersion); @@ -587,6 +595,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "vec4 fwidthCoarse(vec4 p);" ); +#ifndef GLSLANG_ANGLE TString derivativesAndControl16bits ( "float16_t dFdx(float16_t);" "f16vec2 dFdx(f16vec2);" @@ -1174,6 +1183,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n" ); } +#endif // !GLSLANG_ANGLE if ((profile == EEsProfile && version >= 310) || (profile != EEsProfile && version >= 430)) { @@ -1211,6 +1221,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n"); } +#ifndef GLSLANG_ANGLE if (profile != EEsProfile && version >= 440) { commonBuiltins.append( "uint64_t atomicMin(coherent volatile inout uint64_t, uint64_t);" @@ -1242,11 +1253,19 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV " int64_t atomicAdd(coherent volatile inout int64_t, int64_t);" "uint64_t atomicAdd(coherent volatile inout uint64_t, uint64_t, int, int, int);" " int64_t atomicAdd(coherent volatile inout int64_t, int64_t, int, int, int);" + " float atomicAdd(coherent volatile inout float, float);" + " float atomicAdd(coherent volatile inout float, float, int, int, int);" + " double atomicAdd(coherent volatile inout double, double);" + " double atomicAdd(coherent volatile inout double, double, int, int, int);" "uint64_t atomicExchange(coherent volatile inout uint64_t, uint64_t);" " int64_t atomicExchange(coherent volatile inout int64_t, int64_t);" "uint64_t atomicExchange(coherent volatile inout uint64_t, uint64_t, int, int, int);" " int64_t atomicExchange(coherent volatile inout int64_t, int64_t, int, int, int);" + " float atomicExchange(coherent volatile inout float, float);" + " float atomicExchange(coherent volatile inout float, float, int, int, int);" + " double atomicExchange(coherent volatile inout double, double);" + " double atomicExchange(coherent volatile inout double, double, int, int, int);" "uint64_t atomicCompSwap(coherent volatile inout uint64_t, uint64_t, uint64_t);" " int64_t atomicCompSwap(coherent volatile inout int64_t, int64_t, int64_t);" @@ -1255,15 +1274,20 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "uint64_t atomicLoad(coherent volatile in uint64_t, int, int, int);" " int64_t atomicLoad(coherent volatile in int64_t, int, int, int);" + " float atomicLoad(coherent volatile in float, int, int, int);" + " double atomicLoad(coherent volatile in double, int, int, int);" 
"void atomicStore(coherent volatile out uint64_t, uint64_t, int, int, int);" "void atomicStore(coherent volatile out int64_t, int64_t, int, int, int);" + "void atomicStore(coherent volatile out float, float, int, int, int);" + "void atomicStore(coherent volatile out double, double, int, int, int);" "\n"); } -#endif +#endif // !GLSLANG_ANGLE +#endif // !GLSLANG_WEB if ((profile == EEsProfile && version >= 300) || - (profile != EEsProfile && version >= 330)) { + (profile != EEsProfile && version >= 150)) { // GL_ARB_shader_bit_encoding commonBuiltins.append( "int floatBitsToInt(highp float value);" "ivec2 floatBitsToInt(highp vec2 value);" @@ -1300,6 +1324,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n"); } +#ifndef GLSLANG_ANGLE if (profile != EEsProfile && version >= 150) { // ARB_gpu_shader_fp64 commonBuiltins.append( "double fma(double, double, double);" @@ -1308,6 +1333,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "dvec4 fma(dvec4, dvec4, dvec4 );" "\n"); } +#endif if ((profile == EEsProfile && version >= 310) || (profile != EEsProfile && version >= 400)) { @@ -1325,6 +1351,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n"); } +#ifndef GLSLANG_ANGLE if (profile != EEsProfile && version >= 150) { // ARB_gpu_shader_fp64 commonBuiltins.append( "double frexp(double, out int);" @@ -1342,10 +1369,11 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n"); } +#endif #endif if ((profile == EEsProfile && version >= 300) || - (profile != EEsProfile && version >= 400)) { + (profile != EEsProfile && version >= 150)) { commonBuiltins.append( "highp uint packUnorm2x16(vec2);" "vec2 unpackUnorm2x16(highp uint);" @@ -1353,7 +1381,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV } if ((profile == EEsProfile && version >= 300) || - (profile != EEsProfile && version >= 420)) { + (profile != EEsProfile && version >= 150)) { commonBuiltins.append( "highp uint packSnorm2x16(vec2);" " vec2 unpackSnorm2x16(highp uint);" @@ -1365,7 +1393,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV commonBuiltins.append( "mediump vec2 unpackHalf2x16(highp uint);" "\n"); - } else if (profile != EEsProfile && version >= 420) { + } else if (profile != EEsProfile && version >= 150) { commonBuiltins.append( " vec2 unpackHalf2x16(highp uint);" "\n"); @@ -1373,7 +1401,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV #ifndef GLSLANG_WEB if ((profile == EEsProfile && version >= 310) || - (profile != EEsProfile && version >= 400)) { + (profile != EEsProfile && version >= 150)) { commonBuiltins.append( "highp uint packSnorm4x8(vec4);" "highp uint packUnorm4x8(vec4);" @@ -1385,7 +1413,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "mediump vec4 unpackSnorm4x8(highp uint);" "mediump vec4 unpackUnorm4x8(highp uint);" "\n"); - } else if (profile != EEsProfile && version >= 400) { + } else if (profile != EEsProfile && version >= 150) { commonBuiltins.append( "vec4 unpackSnorm4x8(highp uint);" "vec4 unpackUnorm4x8(highp uint);" @@ -1451,6 +1479,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV } #ifndef GLSLANG_WEB +#ifndef GLSLANG_ANGLE // // Original-style texture functions existing in all stages. // (Per-stage functions below.) 
@@ -1599,6 +1628,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n"); } } +#endif // !GLSLANG_ANGLE // Bitfield if ((profile == EEsProfile && version >= 310) || @@ -1741,6 +1771,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n"); } +#ifndef GLSLANG_ANGLE // GL_ARB_shader_ballot if (profile != EEsProfile && version >= 450) { commonBuiltins.append( @@ -3061,7 +3092,27 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "bool textureFootprintGradClampNV(sampler2D, vec2, vec2, vec2, float, int, bool, out gl_TextureFootprint2DNV);" "\n"); } +#endif // !GLSLANG_ANGLE + if ((profile == EEsProfile && version >= 300 && version < 310) || + (profile != EEsProfile && version >= 150 && version < 450)) { // GL_EXT_shader_integer_mix + commonBuiltins.append("int mix(int, int, bool);" + "ivec2 mix(ivec2, ivec2, bvec2);" + "ivec3 mix(ivec3, ivec3, bvec3);" + "ivec4 mix(ivec4, ivec4, bvec4);" + "uint mix(uint, uint, bool );" + "uvec2 mix(uvec2, uvec2, bvec2);" + "uvec3 mix(uvec3, uvec3, bvec3);" + "uvec4 mix(uvec4, uvec4, bvec4);" + "bool mix(bool, bool, bool );" + "bvec2 mix(bvec2, bvec2, bvec2);" + "bvec3 mix(bvec3, bvec3, bvec3);" + "bvec4 mix(bvec4, bvec4, bvec4);" + + "\n"); + } + +#ifndef GLSLANG_ANGLE // GL_AMD_gpu_shader_half_float/Explicit types if (profile != EEsProfile && version >= 450) { commonBuiltins.append( @@ -3912,28 +3963,30 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "f64vec3 log2(f64vec3);" "f64vec4 log2(f64vec4);" "\n"); - } - if (profile != EEsProfile && version >= 450) { - stageBuiltins[EShLangFragment].append(derivativesAndControl64bits); - stageBuiltins[EShLangFragment].append( - "float64_t interpolateAtCentroid(float64_t);" - "f64vec2 interpolateAtCentroid(f64vec2);" - "f64vec3 interpolateAtCentroid(f64vec3);" - "f64vec4 interpolateAtCentroid(f64vec4);" + } - "float64_t interpolateAtSample(float64_t, int);" - "f64vec2 interpolateAtSample(f64vec2, int);" - "f64vec3 interpolateAtSample(f64vec3, int);" - "f64vec4 interpolateAtSample(f64vec4, int);" + if (profile != EEsProfile && version >= 450) { + stageBuiltins[EShLangFragment].append(derivativesAndControl64bits); + stageBuiltins[EShLangFragment].append( + "float64_t interpolateAtCentroid(float64_t);" + "f64vec2 interpolateAtCentroid(f64vec2);" + "f64vec3 interpolateAtCentroid(f64vec3);" + "f64vec4 interpolateAtCentroid(f64vec4);" - "float64_t interpolateAtOffset(float64_t, f64vec2);" - "f64vec2 interpolateAtOffset(f64vec2, f64vec2);" - "f64vec3 interpolateAtOffset(f64vec3, f64vec2);" - "f64vec4 interpolateAtOffset(f64vec4, f64vec2);" + "float64_t interpolateAtSample(float64_t, int);" + "f64vec2 interpolateAtSample(f64vec2, int);" + "f64vec3 interpolateAtSample(f64vec3, int);" + "f64vec4 interpolateAtSample(f64vec4, int);" - "\n"); + "float64_t interpolateAtOffset(float64_t, f64vec2);" + "f64vec2 interpolateAtOffset(f64vec2, f64vec2);" + "f64vec3 interpolateAtOffset(f64vec3, f64vec2);" + "f64vec4 interpolateAtOffset(f64vec4, f64vec2);" + + "\n"); } +#endif // !GLSLANG_ANGLE //============================================================================ // @@ -3949,6 +4002,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV if (spvVersion.vulkan == 0 && IncludeLegacy(version, profile, spvVersion)) stageBuiltins[EShLangVertex].append("vec4 ftransform();"); +#ifndef GLSLANG_ANGLE // // Original-style texture Functions with lod. 
// @@ -4008,6 +4062,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n"); } } +#endif // !GLSLANG_ANGLE if ((profile != EEsProfile && version >= 150) || (profile == EEsProfile && version >= 310)) { @@ -4028,7 +4083,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "void EndPrimitive();" "\n"); } -#endif +#endif // !GLSLANG_WEB //============================================================================ // @@ -4067,10 +4122,10 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV } #ifndef GLSLANG_WEB if ((profile != EEsProfile && version >= 420) || esBarrier) { - commonBuiltins.append( - "void memoryBarrierAtomicCounter();" - "void memoryBarrierImage();" - ); + if (spvVersion.vulkan == 0) { + commonBuiltins.append("void memoryBarrierAtomicCounter();"); + } + commonBuiltins.append("void memoryBarrierImage();"); } if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) { stageBuiltins[EShLangMeshNV].append( @@ -4086,6 +4141,9 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV commonBuiltins.append("void controlBarrier(int, int, int, int);\n" "void memoryBarrier(int, int, int);\n"); + commonBuiltins.append("void debugPrintfEXT();\n"); + +#ifndef GLSLANG_ANGLE if (profile != EEsProfile && version >= 450) { // coopMatStoreNV perhaps ought to have "out" on the buf parameter, but // adding it introduces undesirable tempArgs on the stack. What we want @@ -4209,6 +4267,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n"); } +#endif // !GLSLANG_ANGLE // GL_ARB_derivative_control if (profile != EEsProfile && version >= 400) { @@ -4246,6 +4305,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "bool helperInvocationEXT();" "\n"); +#ifndef GLSLANG_ANGLE // GL_AMD_shader_explicit_vertex_parameter if (profile != EEsProfile && version >= 450) { stageBuiltins[EShLangFragment].append( @@ -4320,37 +4380,74 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n"); } - // Builtins for GL_NV_ray_tracing + // Builtins for GL_NV_ray_tracing/GL_EXT_ray_tracing/GL_EXT_ray_query if (profile != EEsProfile && version >= 460) { - stageBuiltins[EShLangRayGenNV].append( - "void traceNV(accelerationStructureNV,uint,uint,uint,uint,uint,vec3,float,vec3,float,int);" - "void executeCallableNV(uint, int);" + commonBuiltins.append("void rayQueryInitializeEXT(rayQueryEXT, accelerationStructureEXT, uint, uint, vec3, float, vec3, float);" + "void rayQueryTerminateEXT(rayQueryEXT);" + "void rayQueryGenerateIntersectionEXT(rayQueryEXT, float);" + "void rayQueryConfirmIntersectionEXT(rayQueryEXT);" + "bool rayQueryProceedEXT(rayQueryEXT);" + "uint rayQueryGetIntersectionTypeEXT(rayQueryEXT, bool);" + "float rayQueryGetRayTMinEXT(rayQueryEXT);" + "uint rayQueryGetRayFlagsEXT(rayQueryEXT);" + "vec3 rayQueryGetWorldRayOriginEXT(rayQueryEXT);" + "vec3 rayQueryGetWorldRayDirectionEXT(rayQueryEXT);" + "float rayQueryGetIntersectionTEXT(rayQueryEXT, bool);" + "int rayQueryGetIntersectionInstanceCustomIndexEXT(rayQueryEXT, bool);" + "int rayQueryGetIntersectionInstanceIdEXT(rayQueryEXT, bool);" + "uint rayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetEXT(rayQueryEXT, bool);" + "int rayQueryGetIntersectionGeometryIndexEXT(rayQueryEXT, bool);" + "int rayQueryGetIntersectionPrimitiveIndexEXT(rayQueryEXT, bool);" + "vec2 
rayQueryGetIntersectionBarycentricsEXT(rayQueryEXT, bool);" + "bool rayQueryGetIntersectionFrontFaceEXT(rayQueryEXT, bool);" + "bool rayQueryGetIntersectionCandidateAABBOpaqueEXT(rayQueryEXT);" + "vec3 rayQueryGetIntersectionObjectRayDirectionEXT(rayQueryEXT, bool);" + "vec3 rayQueryGetIntersectionObjectRayOriginEXT(rayQueryEXT, bool);" + "mat4x3 rayQueryGetIntersectionObjectToWorldEXT(rayQueryEXT, bool);" + "mat4x3 rayQueryGetIntersectionWorldToObjectEXT(rayQueryEXT, bool);" "\n"); - stageBuiltins[EShLangIntersectNV].append( + + stageBuiltins[EShLangRayGen].append( + "void traceNV(accelerationStructureNV,uint,uint,uint,uint,uint,vec3,float,vec3,float,int);" + "void traceRayEXT(accelerationStructureEXT,uint,uint,uint,uint,uint,vec3,float,vec3,float,int);" + "void executeCallableNV(uint, int);" + "void executeCallableEXT(uint, int);" + "\n"); + stageBuiltins[EShLangIntersect].append( "bool reportIntersectionNV(float, uint);" + "bool reportIntersectionEXT(float, uint);" "\n"); - stageBuiltins[EShLangAnyHitNV].append( + stageBuiltins[EShLangAnyHit].append( "void ignoreIntersectionNV();" + "void ignoreIntersectionEXT();" "void terminateRayNV();" + "void terminateRayEXT();" "\n"); - stageBuiltins[EShLangClosestHitNV].append( + stageBuiltins[EShLangClosestHit].append( "void traceNV(accelerationStructureNV,uint,uint,uint,uint,uint,vec3,float,vec3,float,int);" + "void traceRayEXT(accelerationStructureEXT,uint,uint,uint,uint,uint,vec3,float,vec3,float,int);" "void executeCallableNV(uint, int);" + "void executeCallableEXT(uint, int);" "\n"); - stageBuiltins[EShLangMissNV].append( + stageBuiltins[EShLangMiss].append( "void traceNV(accelerationStructureNV,uint,uint,uint,uint,uint,vec3,float,vec3,float,int);" + "void traceRayEXT(accelerationStructureEXT,uint,uint,uint,uint,uint,vec3,float,vec3,float,int);" "void executeCallableNV(uint, int);" + "void executeCallableEXT(uint, int);" "\n"); - stageBuiltins[EShLangCallableNV].append( + stageBuiltins[EShLangCallable].append( "void executeCallableNV(uint, int);" + "void executeCallableEXT(uint, int);" "\n"); } +#endif // !GLSLANG_ANGLE //E_SPV_NV_compute_shader_derivatives if ((profile == EEsProfile && version >= 320) || (profile != EEsProfile && version >= 450)) { stageBuiltins[EShLangCompute].append(derivativeControls); stageBuiltins[EShLangCompute].append("\n"); } +#ifndef GLSLANG_ANGLE if (profile != EEsProfile && version >= 450) { stageBuiltins[EShLangCompute].append(derivativesAndControl16bits); stageBuiltins[EShLangCompute].append(derivativesAndControl64bits); @@ -4363,7 +4460,8 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "void writePackedPrimitiveIndices4x8NV(uint, uint);" "\n"); } -#endif +#endif // !GLSLANG_ANGLE +#endif // !GLSLANG_WEB //============================================================================ // @@ -4400,7 +4498,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n"); } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) if (spvVersion.spv == 0 && IncludeLegacy(version, profile, spvVersion)) { // // Matrix state. p. 31, 32, 37, 39, 40. 
@@ -4518,7 +4616,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n"); } -#endif +#endif // !GLSLANG_WEB && !GLSLANG_ANGLE //============================================================================ // @@ -4549,6 +4647,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV } #ifndef GLSLANG_WEB +#ifndef GLSLANG_ANGLE //============================================================================ // // Define the interface to the mesh/task shader. @@ -4636,6 +4735,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV "\n"); } } +#endif // !GLSLANG_ANGLE //============================================================================ // @@ -5306,6 +5406,15 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV #ifndef GLSLANG_WEB + if ((profile != EEsProfile && version >= 140) || + (profile == EEsProfile && version >= 310)) { + stageBuiltins[EShLangFragment].append( + "flat in highp int gl_DeviceIndex;" // GL_EXT_device_group + "flat in highp int gl_ViewIndex;" // GL_EXT_multiview + "\n"); + } + +#ifndef GLSLANG_ANGLE // GL_ARB_shader_ballot if (profile != EEsProfile && version >= 450) { const char* ballotDecls = @@ -5336,14 +5445,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV stageBuiltins[EShLangTaskNV] .append(ballotDecls); } - if ((profile != EEsProfile && version >= 140) || - (profile == EEsProfile && version >= 310)) { - stageBuiltins[EShLangFragment].append( - "flat in highp int gl_DeviceIndex;" // GL_EXT_device_group - "flat in highp int gl_ViewIndex;" // GL_EXT_multiview - "\n"); - } - // GL_KHR_shader_subgroup if ((profile == EEsProfile && version >= 310) || (profile != EEsProfile && version >= 140)) { @@ -5391,118 +5492,173 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV stageBuiltins[EShLangMeshNV] .append(computeSubgroupDecls); stageBuiltins[EShLangTaskNV] .append(subgroupDecls); stageBuiltins[EShLangTaskNV] .append(computeSubgroupDecls); - stageBuiltins[EShLangRayGenNV] .append(subgroupDecls); - stageBuiltins[EShLangIntersectNV] .append(subgroupDecls); - stageBuiltins[EShLangAnyHitNV] .append(subgroupDecls); - stageBuiltins[EShLangClosestHitNV] .append(subgroupDecls); - stageBuiltins[EShLangMissNV] .append(subgroupDecls); - stageBuiltins[EShLangCallableNV] .append(subgroupDecls); + stageBuiltins[EShLangRayGen] .append(subgroupDecls); + stageBuiltins[EShLangIntersect] .append(subgroupDecls); + stageBuiltins[EShLangAnyHit] .append(subgroupDecls); + stageBuiltins[EShLangClosestHit] .append(subgroupDecls); + stageBuiltins[EShLangMiss] .append(subgroupDecls); + stageBuiltins[EShLangCallable] .append(subgroupDecls); } - // GL_NV_ray_tracing + // GL_NV_ray_tracing/GL_EXT_ray_tracing if (profile != EEsProfile && version >= 460) { const char *constRayFlags = "const uint gl_RayFlagsNoneNV = 0U;" + "const uint gl_RayFlagsNoneEXT = 0U;" "const uint gl_RayFlagsOpaqueNV = 1U;" + "const uint gl_RayFlagsOpaqueEXT = 1U;" "const uint gl_RayFlagsNoOpaqueNV = 2U;" + "const uint gl_RayFlagsNoOpaqueEXT = 2U;" "const uint gl_RayFlagsTerminateOnFirstHitNV = 4U;" + "const uint gl_RayFlagsTerminateOnFirstHitEXT = 4U;" "const uint gl_RayFlagsSkipClosestHitShaderNV = 8U;" + "const uint gl_RayFlagsSkipClosestHitShaderEXT = 8U;" "const uint gl_RayFlagsCullBackFacingTrianglesNV = 16U;" + "const uint gl_RayFlagsCullBackFacingTrianglesEXT = 16U;" "const uint gl_RayFlagsCullFrontFacingTrianglesNV = 32U;" + 
"const uint gl_RayFlagsCullFrontFacingTrianglesEXT = 32U;" "const uint gl_RayFlagsCullOpaqueNV = 64U;" + "const uint gl_RayFlagsCullOpaqueEXT = 64U;" "const uint gl_RayFlagsCullNoOpaqueNV = 128U;" + "const uint gl_RayFlagsCullNoOpaqueEXT = 128U;" + "const uint gl_RayFlagsSkipTrianglesEXT = 256U;" + "const uint gl_RayFlagsSkipAABBEXT = 512U;" + "const uint gl_HitKindFrontFacingTriangleEXT = 254U;" + "const uint gl_HitKindBackFacingTriangleEXT = 255U;" "\n"; + + const char *constRayQueryIntersection = + "const uint gl_RayQueryCandidateIntersectionEXT = 0U;" + "const uint gl_RayQueryCommittedIntersectionEXT = 1U;" + "const uint gl_RayQueryCommittedIntersectionNoneEXT = 0U;" + "const uint gl_RayQueryCommittedIntersectionTriangleEXT = 1U;" + "const uint gl_RayQueryCommittedIntersectionGeneratedEXT = 2U;" + "const uint gl_RayQueryCandidateIntersectionTriangleEXT = 0U;" + "const uint gl_RayQueryCandidateIntersectionAABBEXT = 1U;" + "\n"; + const char *rayGenDecls = "in uvec3 gl_LaunchIDNV;" + "in uvec3 gl_LaunchIDEXT;" "in uvec3 gl_LaunchSizeNV;" + "in uvec3 gl_LaunchSizeEXT;" "\n"; const char *intersectDecls = "in uvec3 gl_LaunchIDNV;" + "in uvec3 gl_LaunchIDEXT;" "in uvec3 gl_LaunchSizeNV;" + "in uvec3 gl_LaunchSizeEXT;" "in int gl_PrimitiveID;" "in int gl_InstanceID;" "in int gl_InstanceCustomIndexNV;" + "in int gl_InstanceCustomIndexEXT;" + "in int gl_GeometryIndexEXT;" "in vec3 gl_WorldRayOriginNV;" + "in vec3 gl_WorldRayOriginEXT;" "in vec3 gl_WorldRayDirectionNV;" + "in vec3 gl_WorldRayDirectionEXT;" "in vec3 gl_ObjectRayOriginNV;" + "in vec3 gl_ObjectRayOriginEXT;" "in vec3 gl_ObjectRayDirectionNV;" + "in vec3 gl_ObjectRayDirectionEXT;" "in float gl_RayTminNV;" + "in float gl_RayTminEXT;" "in float gl_RayTmaxNV;" + "in float gl_RayTmaxEXT;" "in mat4x3 gl_ObjectToWorldNV;" + "in mat4x3 gl_ObjectToWorldEXT;" + "in mat3x4 gl_ObjectToWorld3x4EXT;" "in mat4x3 gl_WorldToObjectNV;" + "in mat4x3 gl_WorldToObjectEXT;" + "in mat3x4 gl_WorldToObject3x4EXT;" "in uint gl_IncomingRayFlagsNV;" + "in uint gl_IncomingRayFlagsEXT;" "\n"; const char *hitDecls = "in uvec3 gl_LaunchIDNV;" + "in uvec3 gl_LaunchIDEXT;" "in uvec3 gl_LaunchSizeNV;" + "in uvec3 gl_LaunchSizeEXT;" "in int gl_PrimitiveID;" "in int gl_InstanceID;" "in int gl_InstanceCustomIndexNV;" + "in int gl_InstanceCustomIndexEXT;" + "in int gl_GeometryIndexEXT;" "in vec3 gl_WorldRayOriginNV;" + "in vec3 gl_WorldRayOriginEXT;" "in vec3 gl_WorldRayDirectionNV;" + "in vec3 gl_WorldRayDirectionEXT;" "in vec3 gl_ObjectRayOriginNV;" + "in vec3 gl_ObjectRayOriginEXT;" "in vec3 gl_ObjectRayDirectionNV;" + "in vec3 gl_ObjectRayDirectionEXT;" "in float gl_RayTminNV;" + "in float gl_RayTminEXT;" "in float gl_RayTmaxNV;" + "in float gl_RayTmaxEXT;" "in float gl_HitTNV;" + "in float gl_HitTEXT;" "in uint gl_HitKindNV;" + "in uint gl_HitKindEXT;" "in mat4x3 gl_ObjectToWorldNV;" + "in mat4x3 gl_ObjectToWorldEXT;" + "in mat3x4 gl_ObjectToWorld3x4EXT;" "in mat4x3 gl_WorldToObjectNV;" + "in mat4x3 gl_WorldToObjectEXT;" + "in mat3x4 gl_WorldToObject3x4EXT;" "in uint gl_IncomingRayFlagsNV;" + "in uint gl_IncomingRayFlagsEXT;" "\n"; const char *missDecls = "in uvec3 gl_LaunchIDNV;" + "in uvec3 gl_LaunchIDEXT;" "in uvec3 gl_LaunchSizeNV;" + "in uvec3 gl_LaunchSizeEXT;" "in vec3 gl_WorldRayOriginNV;" + "in vec3 gl_WorldRayOriginEXT;" "in vec3 gl_WorldRayDirectionNV;" + "in vec3 gl_WorldRayDirectionEXT;" "in vec3 gl_ObjectRayOriginNV;" "in vec3 gl_ObjectRayDirectionNV;" "in float gl_RayTminNV;" + "in float gl_RayTminEXT;" "in float gl_RayTmaxNV;" + "in float 
gl_RayTmaxEXT;" "in uint gl_IncomingRayFlagsNV;" + "in uint gl_IncomingRayFlagsEXT;" "\n"; const char *callableDecls = "in uvec3 gl_LaunchIDNV;" + "in uvec3 gl_LaunchIDEXT;" "in uvec3 gl_LaunchSizeNV;" + "in uvec3 gl_LaunchSizeEXT;" "\n"; - stageBuiltins[EShLangRayGenNV].append(rayGenDecls); - stageBuiltins[EShLangRayGenNV].append(constRayFlags); - stageBuiltins[EShLangIntersectNV].append(intersectDecls); - stageBuiltins[EShLangIntersectNV].append(constRayFlags); + commonBuiltins.append(constRayQueryIntersection); + commonBuiltins.append(constRayFlags); - stageBuiltins[EShLangAnyHitNV].append(hitDecls); - stageBuiltins[EShLangAnyHitNV].append(constRayFlags); - - stageBuiltins[EShLangClosestHitNV].append(hitDecls); - stageBuiltins[EShLangClosestHitNV].append(constRayFlags); - - stageBuiltins[EShLangMissNV].append(missDecls); - stageBuiltins[EShLangMissNV].append(constRayFlags); - - stageBuiltins[EShLangCallableNV].append(callableDecls); - stageBuiltins[EShLangCallableNV].append(constRayFlags); + stageBuiltins[EShLangRayGen].append(rayGenDecls); + stageBuiltins[EShLangIntersect].append(intersectDecls); + stageBuiltins[EShLangAnyHit].append(hitDecls); + stageBuiltins[EShLangClosestHit].append(hitDecls); + stageBuiltins[EShLangMiss].append(missDecls); + stageBuiltins[EShLangCallable].append(callableDecls); } + if ((profile != EEsProfile && version >= 140)) { const char *deviceIndex = "in highp int gl_DeviceIndex;" // GL_EXT_device_group "\n"; - stageBuiltins[EShLangRayGenNV].append(deviceIndex); - stageBuiltins[EShLangIntersectNV].append(deviceIndex); - stageBuiltins[EShLangAnyHitNV].append(deviceIndex); - stageBuiltins[EShLangClosestHitNV].append(deviceIndex); - stageBuiltins[EShLangMissNV].append(deviceIndex); - } - - if (version >= 300 /* both ES and non-ES */) { - stageBuiltins[EShLangFragment].append( - "flat in highp uint gl_ViewID_OVR;" // GL_OVR_multiview, GL_OVR_multiview2 - "\n"); + stageBuiltins[EShLangRayGen].append(deviceIndex); + stageBuiltins[EShLangIntersect].append(deviceIndex); + stageBuiltins[EShLangAnyHit].append(deviceIndex); + stageBuiltins[EShLangClosestHit].append(deviceIndex); + stageBuiltins[EShLangMiss].append(deviceIndex); } if ((profile != EEsProfile && version >= 420) || @@ -5512,6 +5668,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV commonBuiltins.append("const int gl_ScopeSubgroup = 3;\n"); commonBuiltins.append("const int gl_ScopeInvocation = 4;\n"); commonBuiltins.append("const int gl_ScopeQueueFamily = 5;\n"); + commonBuiltins.append("const int gl_ScopeShaderCallEXT = 6;\n"); commonBuiltins.append("const int gl_SemanticsRelaxed = 0x0;\n"); commonBuiltins.append("const int gl_SemanticsAcquire = 0x2;\n"); @@ -5527,7 +5684,15 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV commonBuiltins.append("const int gl_StorageSemanticsImage = 0x800;\n"); commonBuiltins.append("const int gl_StorageSemanticsOutput = 0x1000;\n"); } -#endif + +#endif // !GLSLANG_ANGLE + + if (version >= 300 /* both ES and non-ES */) { + stageBuiltins[EShLangFragment].append( + "flat in highp uint gl_ViewID_OVR;" // GL_OVR_multiview, GL_OVR_multiview2 + "\n"); + } +#endif // !GLSLANG_WEB // printf("%s\n", commonBuiltins.c_str()); // printf("%s\n", stageBuiltins[EShLangFragment].c_str()); @@ -5545,13 +5710,16 @@ void TBuiltIns::add2ndGenerationSamplingImaging(int version, EProfile profile, c // // enumerate all the types + const TBasicType bTypes[] = { EbtFloat, EbtInt, EbtUint, +#if !defined(GLSLANG_WEB) && 
!defined(GLSLANG_ANGLE) + EbtFloat16 +#endif + }; #ifdef GLSLANG_WEB - const TBasicType bTypes[] = { EbtFloat, EbtInt, EbtUint }; bool skipBuffer = true; bool skipCubeArrayed = true; const int image = 0; #else - const TBasicType bTypes[] = { EbtFloat, EbtInt, EbtUint, EbtFloat16 }; bool skipBuffer = (profile == EEsProfile && version < 310) || (profile != EEsProfile && version < 140); bool skipCubeArrayed = (profile == EEsProfile && version < 310) || (profile != EEsProfile && version < 130); for (int image = 0; image <= 1; ++image) // loop over "bool" image vs sampler @@ -5576,8 +5744,12 @@ void TBuiltIns::add2ndGenerationSamplingImaging(int version, EProfile profile, c for (int arrayed = 0; arrayed <= 1; ++arrayed) { // loop over "bool" arrayed or not #ifdef GLSLANG_WEB for (int dim = Esd2D; dim <= EsdCube; ++dim) { // 2D, 3D, and Cube +#else +#if defined(GLSLANG_ANGLE) + for (int dim = Esd2D; dim < EsdNumDims; ++dim) { // 2D, ..., buffer, subpass #else for (int dim = Esd1D; dim < EsdNumDims; ++dim) { // 1D, ..., buffer, subpass +#endif if (dim == EsdSubpass && spvVersion.vulkan == 0) continue; if (dim == EsdSubpass && (image || shadow || arrayed)) @@ -5708,7 +5880,7 @@ void TBuiltIns::addQueryFunctions(TSampler sampler, const TString& typeName, int return; #endif - if (sampler.isImage() && ((profile == EEsProfile && version < 310) || (profile != EEsProfile && version < 430))) + if (sampler.isImage() && ((profile == EEsProfile && version < 310) || (profile != EEsProfile && version < 420))) return; if (profile == EEsProfile) @@ -5747,9 +5919,9 @@ void TBuiltIns::addQueryFunctions(TSampler sampler, const TString& typeName, int // // textureQueryLod(), fragment stage only - // + // Also enabled with extension GL_ARB_texture_query_lod - if (profile != EEsProfile && version >= 400 && sampler.isCombined() && sampler.dim != EsdRect && + if (profile != EEsProfile && version >= 150 && sampler.isCombined() && sampler.dim != EsdRect && ! sampler.isMultiSample() && ! sampler.isBuffer()) { for (int f16TexAddr = 0; f16TexAddr < 2; ++f16TexAddr) { if (f16TexAddr && sampler.type != EbtFloat16) @@ -5898,12 +6070,39 @@ void TBuiltIns::addImageFunctions(TSampler sampler, const TString& typeName, int // not int or uint // GL_ARB_ES3_1_compatibility // TODO: spec issue: are there restrictions on the kind of layout() that can be used? what about dropping memory qualifiers? 
- if ((profile != EEsProfile && version >= 450) || - (profile == EEsProfile && version >= 310)) { + if (profile == EEsProfile && version >= 310) { commonBuiltins.append("float imageAtomicExchange(volatile coherent "); commonBuiltins.append(imageParams); commonBuiltins.append(", float);\n"); } + if (profile != EEsProfile && version >= 450) { + commonBuiltins.append("float imageAtomicAdd(volatile coherent "); + commonBuiltins.append(imageParams); + commonBuiltins.append(", float);\n"); + + commonBuiltins.append("float imageAtomicAdd(volatile coherent "); + commonBuiltins.append(imageParams); + commonBuiltins.append(", float"); + commonBuiltins.append(", int, int, int);\n"); + + commonBuiltins.append("float imageAtomicExchange(volatile coherent "); + commonBuiltins.append(imageParams); + commonBuiltins.append(", float);\n"); + + commonBuiltins.append("float imageAtomicExchange(volatile coherent "); + commonBuiltins.append(imageParams); + commonBuiltins.append(", float"); + commonBuiltins.append(", int, int, int);\n"); + + commonBuiltins.append("float imageAtomicLoad(readonly volatile coherent "); + commonBuiltins.append(imageParams); + commonBuiltins.append(", int, int, int);\n"); + + commonBuiltins.append("void imageAtomicStore(writeonly volatile coherent "); + commonBuiltins.append(imageParams); + commonBuiltins.append(", float"); + commonBuiltins.append(", int, int, int);\n"); + } } } @@ -5971,6 +6170,9 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName, #ifdef GLSLANG_WEB profile = EEsProfile; version = 310; +#elif defined(GLSLANG_ANGLE) + profile = ECoreProfile; + version = 450; #endif // @@ -6047,7 +6249,7 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName, continue; // loop over 16-bit floating-point texel addressing -#ifdef GLSLANG_WEB +#if defined(GLSLANG_WEB) || defined(GLSLANG_ANGLE) const int f16TexAddr = 0; #else for (int f16TexAddr = 0; f16TexAddr <= 1; ++f16TexAddr) @@ -6060,7 +6262,7 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName, totalDims--; } // loop over "bool" lod clamp -#ifdef GLSLANG_WEB +#if defined(GLSLANG_WEB) || defined(GLSLANG_ANGLE) const int lodClamp = 0; #else for (int lodClamp = 0; lodClamp <= 1 ;++lodClamp) @@ -6072,7 +6274,7 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName, continue; // loop over "bool" sparse or not -#ifdef GLSLANG_WEB +#if defined(GLSLANG_WEB) || defined(GLSLANG_ANGLE) const int sparse = 0; #else for (int sparse = 0; sparse <= 1; ++sparse) @@ -6127,7 +6329,7 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName, s.append("Offset"); if (lodClamp) s.append("Clamp"); - if (lodClamp || sparse) + if (lodClamp != 0 || sparse) s.append("ARB"); s.append("("); @@ -6227,7 +6429,7 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName, s.append(");\n"); // Add to the per-language set of built-ins - if (bias || lodClamp) { + if (bias || lodClamp != 0) { stageBuiltins[EShLangFragment].append(s); stageBuiltins[EShLangCompute].append(s); } else @@ -6256,6 +6458,9 @@ void TBuiltIns::addGatherFunctions(TSampler sampler, const TString& typeName, in #ifdef GLSLANG_WEB profile = EEsProfile; version = 310; +#elif defined(GLSLANG_ANGLE) + profile = ECoreProfile; + version = 450; #endif switch (sampler.dim) { @@ -6499,6 +6704,9 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf #ifdef GLSLANG_WEB version = 310; profile = EEsProfile; +#elif 
defined(GLSLANG_ANGLE) + version = 450; + profile = ECoreProfile; #endif // @@ -6624,6 +6832,35 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf } } + if (version >= 320) { + // tessellation + + snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlImageUniforms = %d;", resources.maxTessControlImageUniforms); + s.append(builtInConstant); + snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationImageUniforms = %d;", resources.maxTessEvaluationImageUniforms); + s.append(builtInConstant); + snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlAtomicCounters = %d;", resources.maxTessControlAtomicCounters); + s.append(builtInConstant); + snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationAtomicCounters = %d;", resources.maxTessEvaluationAtomicCounters); + s.append(builtInConstant); + snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlAtomicCounterBuffers = %d;", resources.maxTessControlAtomicCounterBuffers); + s.append(builtInConstant); + snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationAtomicCounterBuffers = %d;", resources.maxTessEvaluationAtomicCounterBuffers); + s.append(builtInConstant); + } + + if (version >= 100) { + // GL_EXT_blend_func_extended + snprintf(builtInConstant, maxSize, "const mediump int gl_MaxDualSourceDrawBuffersEXT = %d;", resources.maxDualSourceDrawBuffersEXT); + s.append(builtInConstant); + // this is here instead of with the others in initialize(version, profile) due to the dependence on gl_MaxDualSourceDrawBuffersEXT + if (language == EShLangFragment) { + s.append( + "mediump vec4 gl_SecondaryFragColorEXT;" + "mediump vec4 gl_SecondaryFragDataEXT[gl_MaxDualSourceDrawBuffersEXT];" + "\n"); + } + } } else { // non-ES profile @@ -6898,6 +7135,7 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf s.append("\n"); } +#ifndef GLSLANG_ANGLE // atomic counters (some in compute below) if ((profile == EEsProfile && version >= 310) || (profile != EEsProfile && version >= 420)) { @@ -6934,6 +7172,7 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf s.append("\n"); } +#endif // !GLSLANG_ANGLE // GL_ARB_cull_distance if (profile != EEsProfile && version >= 450) { @@ -6950,6 +7189,7 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf s.append(builtInConstant); } +#ifndef GLSLANG_ANGLE // SPV_NV_mesh_shader if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) { snprintf(builtInConstant, maxSize, "const int gl_MaxMeshOutputVerticesNV = %d;", resources.maxMeshOutputVerticesNV); @@ -6972,6 +7212,7 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf s.append("\n"); } +#endif #endif s.append("\n"); @@ -7056,6 +7297,9 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion #ifdef GLSLANG_WEB version = 310; profile = EEsProfile; +#elif defined(GLSLANG_ANGLE) + version = 450; + profile = ECoreProfile; #endif // @@ -7245,7 +7489,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion case EShLangTessEvaluation: case EShLangGeometry: -#endif +#endif // !GLSLANG_WEB SpecialQualifier("gl_Position", EvqPosition, EbvPosition, symbolTable); SpecialQualifier("gl_PointSize", EvqPointSize, EbvPointSize, symbolTable); @@ -7406,7 +7650,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion BuiltInVariable("gl_WarpIDNV", EbvWarpID, symbolTable); 
BuiltInVariable("gl_SMIDNV", EbvSMID, symbolTable); } -#endif +#endif // !GLSLANG_WEB break; case EShLangFragment: @@ -7436,6 +7680,38 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion BuiltInVariable("gl_FragStencilRefARB", EbvFragStencilRef, symbolTable); } + if (profile != EEsProfile && version < 400) { + symbolTable.setFunctionExtensions("textureQueryLod", 1, &E_GL_ARB_texture_query_lod); + } + + if (profile != EEsProfile && version >= 460) { + symbolTable.setFunctionExtensions("rayQueryInitializeEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryTerminateEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGenerateIntersectionEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryConfirmIntersectionEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryProceedEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionTypeEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionTEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetRayFlagsEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetRayTMinEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionInstanceCustomIndexEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionInstanceIdEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionGeometryIndexEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionPrimitiveIndexEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionBarycentricsEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionFrontFaceEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionCandidateAABBOpaqueEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionObjectRayDirectionEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionObjectRayOriginEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionObjectToWorldEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetIntersectionWorldToObjectEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetWorldRayOriginEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setFunctionExtensions("rayQueryGetWorldRayDirectionEXT", 1, &E_GL_EXT_ray_query); + symbolTable.setVariableExtensions("gl_RayFlagsSkipAABBEXT", 1, &E_GL_EXT_ray_flags_primitive_culling); + symbolTable.setVariableExtensions("gl_RayFlagsSkipTrianglesEXT", 1, &E_GL_EXT_ray_flags_primitive_culling); + } + if ((profile != EEsProfile && version >= 130) || (profile == EEsProfile && version >= 310)) { BuiltInVariable("gl_SampleID", EbvSampleId, symbolTable); @@ -7674,6 +7950,45 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion symbolTable.setFunctionExtensions("imageAtomicCompSwap", 1, &E_GL_OES_shader_image_atomic); } + if (profile != EEsProfile && version < 330 ) { + symbolTable.setFunctionExtensions("floatBitsToInt", 1, &E_GL_ARB_shader_bit_encoding); + symbolTable.setFunctionExtensions("floatBitsToUint", 1, 
&E_GL_ARB_shader_bit_encoding); + symbolTable.setFunctionExtensions("intBitsToFloat", 1, &E_GL_ARB_shader_bit_encoding); + symbolTable.setFunctionExtensions("uintBitsToFloat", 1, &E_GL_ARB_shader_bit_encoding); + } + + if (profile != EEsProfile && version < 430 ) { + symbolTable.setFunctionExtensions("imageSize", 1, &E_GL_ARB_shader_image_size); + } + + // GL_ARB_shader_storage_buffer_object + if (profile != EEsProfile && version < 430 ) { + symbolTable.setFunctionExtensions("atomicAdd", 1, &E_GL_ARB_shader_storage_buffer_object); + symbolTable.setFunctionExtensions("atomicMin", 1, &E_GL_ARB_shader_storage_buffer_object); + symbolTable.setFunctionExtensions("atomicMax", 1, &E_GL_ARB_shader_storage_buffer_object); + symbolTable.setFunctionExtensions("atomicAnd", 1, &E_GL_ARB_shader_storage_buffer_object); + symbolTable.setFunctionExtensions("atomicOr", 1, &E_GL_ARB_shader_storage_buffer_object); + symbolTable.setFunctionExtensions("atomicXor", 1, &E_GL_ARB_shader_storage_buffer_object); + symbolTable.setFunctionExtensions("atomicExchange", 1, &E_GL_ARB_shader_storage_buffer_object); + symbolTable.setFunctionExtensions("atomicCompSwap", 1, &E_GL_ARB_shader_storage_buffer_object); + } + + // GL_ARB_shading_language_packing + if (profile != EEsProfile && version < 400 ) { + symbolTable.setFunctionExtensions("packUnorm2x16", 1, &E_GL_ARB_shading_language_packing); + symbolTable.setFunctionExtensions("unpackUnorm2x16", 1, &E_GL_ARB_shading_language_packing); + symbolTable.setFunctionExtensions("packSnorm4x8", 1, &E_GL_ARB_shading_language_packing); + symbolTable.setFunctionExtensions("packUnorm4x8", 1, &E_GL_ARB_shading_language_packing); + symbolTable.setFunctionExtensions("unpackSnorm4x8", 1, &E_GL_ARB_shading_language_packing); + symbolTable.setFunctionExtensions("unpackUnorm4x8", 1, &E_GL_ARB_shading_language_packing); + } + if (profile != EEsProfile && version < 420 ) { + symbolTable.setFunctionExtensions("packSnorm2x16", 1, &E_GL_ARB_shading_language_packing); + symbolTable.setFunctionExtensions("unpackSnorm2x16", 1, &E_GL_ARB_shading_language_packing); + symbolTable.setFunctionExtensions("unpackHalf2x16", 1, &E_GL_ARB_shading_language_packing); + symbolTable.setFunctionExtensions("packHalf2x16", 1, &E_GL_ARB_shading_language_packing); + } + symbolTable.setVariableExtensions("gl_DeviceIndex", 1, &E_GL_EXT_device_group); BuiltInVariable("gl_DeviceIndex", EbvDeviceIndex, symbolTable); symbolTable.setVariableExtensions("gl_ViewIndex", 1, &E_GL_EXT_multiview); @@ -7841,7 +8156,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion } symbolTable.setFunctionExtensions("helperInvocationEXT", 1, &E_GL_EXT_demote_to_helper_invocation); -#endif +#endif // !GLSLANG_WEB break; case EShLangCompute: @@ -7885,7 +8200,9 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion symbolTable.setFunctionExtensions("groupMemoryBarrier", 1, &E_GL_ARB_compute_shader); } + symbolTable.setFunctionExtensions("controlBarrier", 1, &E_GL_KHR_memory_scope_semantics); + symbolTable.setFunctionExtensions("debugPrintfEXT", 1, &E_GL_EXT_debug_printf); // GL_ARB_shader_ballot if (profile != EEsProfile) { @@ -7971,53 +8288,101 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion symbolTable.setFunctionExtensions("dFdyCoarse", 1, &E_GL_NV_compute_shader_derivatives); symbolTable.setFunctionExtensions("fwidthCoarse", 1, &E_GL_NV_compute_shader_derivatives); } -#endif +#endif // !GLSLANG_WEB break; -#ifndef GLSLANG_WEB - case 
EShLangRayGenNV: - case EShLangIntersectNV: - case EShLangAnyHitNV: - case EShLangClosestHitNV: - case EShLangMissNV: - case EShLangCallableNV: +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) + case EShLangRayGen: + case EShLangIntersect: + case EShLangAnyHit: + case EShLangClosestHit: + case EShLangMiss: + case EShLangCallable: if (profile != EEsProfile && version >= 460) { + const char *rtexts[] = { E_GL_NV_ray_tracing, E_GL_EXT_ray_tracing }; symbolTable.setVariableExtensions("gl_LaunchIDNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_LaunchIDEXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_LaunchSizeNV", 1, &E_GL_NV_ray_tracing); - symbolTable.setVariableExtensions("gl_PrimitiveID", 1, &E_GL_NV_ray_tracing); - symbolTable.setVariableExtensions("gl_InstanceID", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_LaunchSizeEXT", 1, &E_GL_EXT_ray_tracing); + symbolTable.setVariableExtensions("gl_PrimitiveID", 2, rtexts); + symbolTable.setVariableExtensions("gl_InstanceID", 2, rtexts); symbolTable.setVariableExtensions("gl_InstanceCustomIndexNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_InstanceCustomIndexEXT", 1, &E_GL_EXT_ray_tracing); + symbolTable.setVariableExtensions("gl_GeometryIndexEXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_WorldRayOriginNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_WorldRayOriginEXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_WorldRayDirectionNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_WorldRayDirectionEXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_ObjectRayOriginNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_ObjectRayOriginEXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_ObjectRayDirectionNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_ObjectRayDirectionEXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_RayTminNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_RayTminEXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_RayTmaxNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_RayTmaxEXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_HitTNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_HitTEXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_HitKindNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_HitKindEXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_ObjectToWorldNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_ObjectToWorldEXT", 1, &E_GL_EXT_ray_tracing); + symbolTable.setVariableExtensions("gl_ObjectToWorld3x4EXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_WorldToObjectNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_WorldToObjectEXT", 1, &E_GL_EXT_ray_tracing); + symbolTable.setVariableExtensions("gl_WorldToObject3x4EXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_IncomingRayFlagsNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setVariableExtensions("gl_IncomingRayFlagsEXT", 1, &E_GL_EXT_ray_tracing); symbolTable.setVariableExtensions("gl_DeviceIndex", 1, &E_GL_EXT_device_group); - BuiltInVariable("gl_LaunchIDNV", EbvLaunchIdNV, symbolTable); - BuiltInVariable("gl_LaunchSizeNV", EbvLaunchSizeNV, symbolTable); - 
BuiltInVariable("gl_PrimitiveID", EbvPrimitiveId, symbolTable); - BuiltInVariable("gl_InstanceID", EbvInstanceId, symbolTable); - BuiltInVariable("gl_InstanceCustomIndexNV", EbvInstanceCustomIndexNV,symbolTable); - BuiltInVariable("gl_WorldRayOriginNV", EbvWorldRayOriginNV, symbolTable); - BuiltInVariable("gl_WorldRayDirectionNV", EbvWorldRayDirectionNV, symbolTable); - BuiltInVariable("gl_ObjectRayOriginNV", EbvObjectRayOriginNV, symbolTable); - BuiltInVariable("gl_ObjectRayDirectionNV", EbvObjectRayDirectionNV, symbolTable); - BuiltInVariable("gl_RayTminNV", EbvRayTminNV, symbolTable); - BuiltInVariable("gl_RayTmaxNV", EbvRayTmaxNV, symbolTable); - BuiltInVariable("gl_HitTNV", EbvHitTNV, symbolTable); - BuiltInVariable("gl_HitKindNV", EbvHitKindNV, symbolTable); - BuiltInVariable("gl_ObjectToWorldNV", EbvObjectToWorldNV, symbolTable); - BuiltInVariable("gl_WorldToObjectNV", EbvWorldToObjectNV, symbolTable); - BuiltInVariable("gl_IncomingRayFlagsNV", EbvIncomingRayFlagsNV, symbolTable); - BuiltInVariable("gl_DeviceIndex", EbvDeviceIndex, symbolTable); + + symbolTable.setFunctionExtensions("traceNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setFunctionExtensions("traceRayEXT", 1, &E_GL_EXT_ray_tracing); + symbolTable.setFunctionExtensions("reportIntersectionNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setFunctionExtensions("reportIntersectionEXT", 1, &E_GL_EXT_ray_tracing); + symbolTable.setFunctionExtensions("ignoreIntersectionNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setFunctionExtensions("ignoreIntersectionEXT", 1, &E_GL_EXT_ray_tracing); + symbolTable.setFunctionExtensions("terminateRayNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setFunctionExtensions("terminateRayEXT", 1, &E_GL_EXT_ray_tracing); + symbolTable.setFunctionExtensions("executeCallableNV", 1, &E_GL_NV_ray_tracing); + symbolTable.setFunctionExtensions("executeCallableEXT", 1, &E_GL_EXT_ray_tracing); + + + BuiltInVariable("gl_LaunchIDNV", EbvLaunchId, symbolTable); + BuiltInVariable("gl_LaunchIDEXT", EbvLaunchId, symbolTable); + BuiltInVariable("gl_LaunchSizeNV", EbvLaunchSize, symbolTable); + BuiltInVariable("gl_LaunchSizeEXT", EbvLaunchSize, symbolTable); + BuiltInVariable("gl_PrimitiveID", EbvPrimitiveId, symbolTable); + BuiltInVariable("gl_InstanceID", EbvInstanceId, symbolTable); + BuiltInVariable("gl_InstanceCustomIndexNV", EbvInstanceCustomIndex,symbolTable); + BuiltInVariable("gl_InstanceCustomIndexEXT", EbvInstanceCustomIndex,symbolTable); + BuiltInVariable("gl_GeometryIndexEXT", EbvGeometryIndex, symbolTable); + BuiltInVariable("gl_WorldRayOriginNV", EbvWorldRayOrigin, symbolTable); + BuiltInVariable("gl_WorldRayOriginEXT", EbvWorldRayOrigin, symbolTable); + BuiltInVariable("gl_WorldRayDirectionNV", EbvWorldRayDirection, symbolTable); + BuiltInVariable("gl_WorldRayDirectionEXT", EbvWorldRayDirection, symbolTable); + BuiltInVariable("gl_ObjectRayOriginNV", EbvObjectRayOrigin, symbolTable); + BuiltInVariable("gl_ObjectRayOriginEXT", EbvObjectRayOrigin, symbolTable); + BuiltInVariable("gl_ObjectRayDirectionNV", EbvObjectRayDirection, symbolTable); + BuiltInVariable("gl_ObjectRayDirectionEXT", EbvObjectRayDirection, symbolTable); + BuiltInVariable("gl_RayTminNV", EbvRayTmin, symbolTable); + BuiltInVariable("gl_RayTminEXT", EbvRayTmin, symbolTable); + BuiltInVariable("gl_RayTmaxNV", EbvRayTmax, symbolTable); + BuiltInVariable("gl_RayTmaxEXT", EbvRayTmax, symbolTable); + BuiltInVariable("gl_HitTNV", EbvHitT, symbolTable); + BuiltInVariable("gl_HitTEXT", EbvHitT, symbolTable); + BuiltInVariable("gl_HitKindNV", 
EbvHitKind, symbolTable); + BuiltInVariable("gl_HitKindEXT", EbvHitKind, symbolTable); + BuiltInVariable("gl_ObjectToWorldNV", EbvObjectToWorld, symbolTable); + BuiltInVariable("gl_ObjectToWorldEXT", EbvObjectToWorld, symbolTable); + BuiltInVariable("gl_ObjectToWorld3x4EXT", EbvObjectToWorld3x4, symbolTable); + BuiltInVariable("gl_WorldToObjectNV", EbvWorldToObject, symbolTable); + BuiltInVariable("gl_WorldToObjectEXT", EbvWorldToObject, symbolTable); + BuiltInVariable("gl_WorldToObject3x4EXT", EbvWorldToObject3x4, symbolTable); + BuiltInVariable("gl_IncomingRayFlagsNV", EbvIncomingRayFlags, symbolTable); + BuiltInVariable("gl_IncomingRayFlagsEXT", EbvIncomingRayFlags, symbolTable); + BuiltInVariable("gl_DeviceIndex", EbvDeviceIndex, symbolTable); // GL_ARB_shader_ballot symbolTable.setVariableExtensions("gl_SubGroupSizeARB", 1, &E_GL_ARB_shader_ballot); @@ -8434,6 +8799,8 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion symbolTable.relateToOperator("average", EOpAverage); symbolTable.relateToOperator("averageRounded", EOpAverageRounded); symbolTable.relateToOperator("multiply32x16", EOpMul32x16); + symbolTable.relateToOperator("debugPrintfEXT", EOpDebugPrintf); + if (PureOperatorBuiltins) { symbolTable.relateToOperator("imageSize", EOpImageQuerySize); @@ -8723,6 +9090,33 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion symbolTable.relateToOperator("dFdyCoarse", EOpDPdyCoarse); symbolTable.relateToOperator("fwidthCoarse", EOpFwidthCoarse); } + + if (profile != EEsProfile && version >= 460) { + symbolTable.relateToOperator("rayQueryInitializeEXT", EOpRayQueryInitialize); + symbolTable.relateToOperator("rayQueryTerminateEXT", EOpRayQueryTerminate); + symbolTable.relateToOperator("rayQueryGenerateIntersectionEXT", EOpRayQueryGenerateIntersection); + symbolTable.relateToOperator("rayQueryConfirmIntersectionEXT", EOpRayQueryConfirmIntersection); + symbolTable.relateToOperator("rayQueryProceedEXT", EOpRayQueryProceed); + symbolTable.relateToOperator("rayQueryGetIntersectionTypeEXT", EOpRayQueryGetIntersectionType); + symbolTable.relateToOperator("rayQueryGetRayTMinEXT", EOpRayQueryGetRayTMin); + symbolTable.relateToOperator("rayQueryGetRayFlagsEXT", EOpRayQueryGetRayFlags); + symbolTable.relateToOperator("rayQueryGetIntersectionTEXT", EOpRayQueryGetIntersectionT); + symbolTable.relateToOperator("rayQueryGetIntersectionInstanceCustomIndexEXT", EOpRayQueryGetIntersectionInstanceCustomIndex); + symbolTable.relateToOperator("rayQueryGetIntersectionInstanceIdEXT", EOpRayQueryGetIntersectionInstanceId); + symbolTable.relateToOperator("rayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetEXT", EOpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffset); + symbolTable.relateToOperator("rayQueryGetIntersectionGeometryIndexEXT", EOpRayQueryGetIntersectionGeometryIndex); + symbolTable.relateToOperator("rayQueryGetIntersectionPrimitiveIndexEXT", EOpRayQueryGetIntersectionPrimitiveIndex); + symbolTable.relateToOperator("rayQueryGetIntersectionBarycentricsEXT", EOpRayQueryGetIntersectionBarycentrics); + symbolTable.relateToOperator("rayQueryGetIntersectionFrontFaceEXT", EOpRayQueryGetIntersectionFrontFace); + symbolTable.relateToOperator("rayQueryGetIntersectionCandidateAABBOpaqueEXT", EOpRayQueryGetIntersectionCandidateAABBOpaque); + symbolTable.relateToOperator("rayQueryGetIntersectionObjectRayDirectionEXT", EOpRayQueryGetIntersectionObjectRayDirection); + 
symbolTable.relateToOperator("rayQueryGetIntersectionObjectRayOriginEXT", EOpRayQueryGetIntersectionObjectRayOrigin); + symbolTable.relateToOperator("rayQueryGetWorldRayDirectionEXT", EOpRayQueryGetWorldRayDirection); + symbolTable.relateToOperator("rayQueryGetWorldRayOriginEXT", EOpRayQueryGetWorldRayOrigin); + symbolTable.relateToOperator("rayQueryGetIntersectionObjectToWorldEXT", EOpRayQueryGetIntersectionObjectToWorld); + symbolTable.relateToOperator("rayQueryGetIntersectionWorldToObjectEXT", EOpRayQueryGetIntersectionWorldToObject); + } + symbolTable.relateToOperator("interpolateAtCentroid", EOpInterpolateAtCentroid); symbolTable.relateToOperator("interpolateAtSample", EOpInterpolateAtSample); symbolTable.relateToOperator("interpolateAtOffset", EOpInterpolateAtOffset); @@ -8754,27 +9148,34 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion symbolTable.relateToOperator("coopMatMulAddNV", EOpCooperativeMatrixMulAdd); break; - case EShLangRayGenNV: - case EShLangClosestHitNV: - case EShLangMissNV: + case EShLangRayGen: + case EShLangClosestHit: + case EShLangMiss: if (profile != EEsProfile && version >= 460) { - symbolTable.relateToOperator("traceNV", EOpTraceNV); - symbolTable.relateToOperator("executeCallableNV", EOpExecuteCallableNV); + symbolTable.relateToOperator("traceNV", EOpTrace); + symbolTable.relateToOperator("traceRayEXT", EOpTrace); + symbolTable.relateToOperator("executeCallableNV", EOpExecuteCallable); + symbolTable.relateToOperator("executeCallableEXT", EOpExecuteCallable); } break; - case EShLangIntersectNV: - if (profile != EEsProfile && version >= 460) - symbolTable.relateToOperator("reportIntersectionNV", EOpReportIntersectionNV); - break; - case EShLangAnyHitNV: + case EShLangIntersect: if (profile != EEsProfile && version >= 460) { - symbolTable.relateToOperator("ignoreIntersectionNV", EOpIgnoreIntersectionNV); - symbolTable.relateToOperator("terminateRayNV", EOpTerminateRayNV); + symbolTable.relateToOperator("reportIntersectionNV", EOpReportIntersection); + symbolTable.relateToOperator("reportIntersectionEXT", EOpReportIntersection); + } + break; + case EShLangAnyHit: + if (profile != EEsProfile && version >= 460) { + symbolTable.relateToOperator("ignoreIntersectionNV", EOpIgnoreIntersection); + symbolTable.relateToOperator("ignoreIntersectionEXT", EOpIgnoreIntersection); + symbolTable.relateToOperator("terminateRayNV", EOpTerminateRay); + symbolTable.relateToOperator("terminateRayEXT", EOpTerminateRay); } break; - case EShLangCallableNV: + case EShLangCallable: if (profile != EEsProfile && version >= 460) { - symbolTable.relateToOperator("executeCallableNV", EOpExecuteCallableNV); + symbolTable.relateToOperator("executeCallableNV", EOpExecuteCallable); + symbolTable.relateToOperator("executeCallableEXT", EOpExecuteCallable); } break; case EShLangMeshNV: @@ -8793,7 +9194,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion default: assert(false && "Language not supported"); } -#endif +#endif // !GLSLANG_WEB } // @@ -8808,6 +9209,10 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, TSymbolTable& symbolTable, const TBuiltInResource &resources) { #ifndef GLSLANG_WEB +#if defined(GLSLANG_ANGLE) + profile = ECoreProfile; + version = 450; +#endif if (profile != EEsProfile && version >= 430 && version < 440) { 
symbolTable.setVariableExtensions("gl_MaxTransformFeedbackBuffers", 1, &E_GL_ARB_enhanced_layouts); symbolTable.setVariableExtensions("gl_MaxTransformFeedbackInterleavedComponents", 1, &E_GL_ARB_enhanced_layouts); @@ -8831,6 +9236,16 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion symbolTable.insert(*new TVariable(NewPoolTString("gl_FragData"), fragData)); SpecialQualifier("gl_FragData", EvqFragColor, EbvFragData, symbolTable); } + + // GL_EXT_blend_func_extended + if (profile == EEsProfile && version >= 100) { + symbolTable.setVariableExtensions("gl_MaxDualSourceDrawBuffersEXT", 1, &E_GL_EXT_blend_func_extended); + symbolTable.setVariableExtensions("gl_SecondaryFragColorEXT", 1, &E_GL_EXT_blend_func_extended); + symbolTable.setVariableExtensions("gl_SecondaryFragDataEXT", 1, &E_GL_EXT_blend_func_extended); + SpecialQualifier("gl_SecondaryFragColorEXT", EvqVaryingOut, EbvSecondaryFragColorEXT, symbolTable); + SpecialQualifier("gl_SecondaryFragDataEXT", EvqVaryingOut, EbvSecondaryFragDataEXT, symbolTable); + } + break; case EShLangTessControl: diff --git a/thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp b/thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp old mode 100755 new mode 100644 index d0f86e63894..b8c220d781b --- a/thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp @@ -1,7 +1,7 @@ // // Copyright (C) 2002-2005 3Dlabs Inc. Ltd. // Copyright (C) 2012-2015 LunarG, Inc. -// Copyright (C) 2015-2018 Google, Inc. +// Copyright (C) 2015-2020 Google, Inc. // Copyright (C) 2017 ARM Limited. // // All rights reserved. @@ -113,14 +113,14 @@ TIntermSymbol* TIntermediate::addSymbol(const TType& type, const TSourceLoc& loc // // Returns nullptr if the working conversions and promotions could not be found. // -TIntermTyped* TIntermediate::addBinaryMath(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc loc) +TIntermTyped* TIntermediate::addBinaryMath(TOperator op, TIntermTyped* left, TIntermTyped* right, const TSourceLoc& loc) { // No operations work on blocks if (left->getType().getBasicType() == EbtBlock || right->getType().getBasicType() == EbtBlock) return nullptr; // Convert "reference +/- int" and "reference - reference" to integer math - if ((op == EOpAdd || op == EOpSub) && extensionRequested(E_GL_EXT_buffer_reference2)) { + if (op == EOpAdd || op == EOpSub) { // No addressing math on struct with unsized array. 
if ((left->isReference() && left->getType().getReferentType()->containsUnsizedArray()) || @@ -140,43 +140,44 @@ TIntermTyped* TIntermediate::addBinaryMath(TOperator op, TIntermTyped* left, TIn node = addBuiltInFunctionCall(loc, EOpConvUint64ToPtr, true, node, referenceType); return node; } - - if (op == EOpAdd && right->isReference() && isTypeInt(left->getBasicType())) { - const TType& referenceType = right->getType(); - TIntermConstantUnion* size = addConstantUnion((unsigned long long)computeBufferReferenceTypeSize(right->getType()), loc, true); - right = addBuiltInFunctionCall(loc, EOpConvPtrToUint64, true, right, TType(EbtUint64)); - - left = createConversion(EbtInt64, left); - left = addBinaryMath(EOpMul, left, size, loc); - - TIntermTyped *node = addBinaryMath(op, left, right, loc); - node = addBuiltInFunctionCall(loc, EOpConvUint64ToPtr, true, node, referenceType); - return node; - } - - if (op == EOpSub && left->isReference() && right->isReference()) { - TIntermConstantUnion* size = addConstantUnion((long long)computeBufferReferenceTypeSize(left->getType()), loc, true); - - left = addBuiltInFunctionCall(loc, EOpConvPtrToUint64, true, left, TType(EbtUint64)); - right = addBuiltInFunctionCall(loc, EOpConvPtrToUint64, true, right, TType(EbtUint64)); - - left = addBuiltInFunctionCall(loc, EOpConvUint64ToInt64, true, left, TType(EbtInt64)); - right = addBuiltInFunctionCall(loc, EOpConvUint64ToInt64, true, right, TType(EbtInt64)); - - left = addBinaryMath(EOpSub, left, right, loc); - - TIntermTyped *node = addBinaryMath(EOpDiv, left, size, loc); - return node; - } - - // No other math operators supported on references - if (left->isReference() || right->isReference()) { - return nullptr; - } } + if (op == EOpAdd && right->isReference() && isTypeInt(left->getBasicType())) { + const TType& referenceType = right->getType(); + TIntermConstantUnion* size = + addConstantUnion((unsigned long long)computeBufferReferenceTypeSize(right->getType()), loc, true); + right = addBuiltInFunctionCall(loc, EOpConvPtrToUint64, true, right, TType(EbtUint64)); + + left = createConversion(EbtInt64, left); + left = addBinaryMath(EOpMul, left, size, loc); + + TIntermTyped *node = addBinaryMath(op, left, right, loc); + node = addBuiltInFunctionCall(loc, EOpConvUint64ToPtr, true, node, referenceType); + return node; + } + + if (op == EOpSub && left->isReference() && right->isReference()) { + TIntermConstantUnion* size = + addConstantUnion((long long)computeBufferReferenceTypeSize(left->getType()), loc, true); + + left = addBuiltInFunctionCall(loc, EOpConvPtrToUint64, true, left, TType(EbtUint64)); + right = addBuiltInFunctionCall(loc, EOpConvPtrToUint64, true, right, TType(EbtUint64)); + + left = addBuiltInFunctionCall(loc, EOpConvUint64ToInt64, true, left, TType(EbtInt64)); + right = addBuiltInFunctionCall(loc, EOpConvUint64ToInt64, true, right, TType(EbtInt64)); + + left = addBinaryMath(EOpSub, left, right, loc); + + TIntermTyped *node = addBinaryMath(EOpDiv, left, size, loc); + return node; + } + + // No other math operators supported on references + if (left->isReference() || right->isReference()) + return nullptr; + // Try converting the children's base types to compatible types. 
- auto children = addConversion(op, left, right); + auto children = addPairConversion(op, left, right); left = std::get<0>(children); right = std::get<1>(children); @@ -226,13 +227,12 @@ TIntermTyped* TIntermediate::addBinaryMath(TOperator op, TIntermTyped* left, TIn // // Low level: add binary node (no promotions or other argument modifications) // -TIntermBinary* TIntermediate::addBinaryNode(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc loc) const +TIntermBinary* TIntermediate::addBinaryNode(TOperator op, TIntermTyped* left, TIntermTyped* right, + const TSourceLoc& loc) const { // build the node TIntermBinary* node = new TIntermBinary(op); - if (loc.line == 0) - loc = left->getLoc(); - node->setLoc(loc); + node->setLoc(loc.line != 0 ? loc : left->getLoc()); node->setLeft(left); node->setRight(right); @@ -242,7 +242,8 @@ TIntermBinary* TIntermediate::addBinaryNode(TOperator op, TIntermTyped* left, TI // // like non-type form, but sets node's type. // -TIntermBinary* TIntermediate::addBinaryNode(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc loc, const TType& type) const +TIntermBinary* TIntermediate::addBinaryNode(TOperator op, TIntermTyped* left, TIntermTyped* right, + const TSourceLoc& loc, const TType& type) const { TIntermBinary* node = addBinaryNode(op, left, right, loc); node->setType(type); @@ -252,12 +253,10 @@ TIntermBinary* TIntermediate::addBinaryNode(TOperator op, TIntermTyped* left, TI // // Low level: add unary node (no promotions or other argument modifications) // -TIntermUnary* TIntermediate::addUnaryNode(TOperator op, TIntermTyped* child, TSourceLoc loc) const +TIntermUnary* TIntermediate::addUnaryNode(TOperator op, TIntermTyped* child, const TSourceLoc& loc) const { TIntermUnary* node = new TIntermUnary(op); - if (loc.line == 0) - loc = child->getLoc(); - node->setLoc(loc); + node->setLoc(loc.line != 0 ? loc : child->getLoc()); node->setOperand(child); return node; @@ -266,7 +265,8 @@ TIntermUnary* TIntermediate::addUnaryNode(TOperator op, TIntermTyped* child, TSo // // like non-type form, but sets node's type. // -TIntermUnary* TIntermediate::addUnaryNode(TOperator op, TIntermTyped* child, TSourceLoc loc, const TType& type) const +TIntermUnary* TIntermediate::addUnaryNode(TOperator op, TIntermTyped* child, const TSourceLoc& loc, const TType& type) + const { TIntermUnary* node = addUnaryNode(op, child, loc); node->setType(type); @@ -281,7 +281,8 @@ TIntermUnary* TIntermediate::addUnaryNode(TOperator op, TIntermTyped* child, TSo // Returns nullptr if the 'right' type could not be converted to match the 'left' type, // or the resulting operation cannot be properly promoted. // -TIntermTyped* TIntermediate::addAssign(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc loc) +TIntermTyped* TIntermediate::addAssign(TOperator op, TIntermTyped* left, TIntermTyped* right, + const TSourceLoc& loc) { // No block assignment if (left->getType().getBasicType() == EbtBlock || right->getType().getBasicType() == EbtBlock) @@ -290,9 +291,7 @@ TIntermTyped* TIntermediate::addAssign(TOperator op, TIntermTyped* left, TInterm // Convert "reference += int" to "reference = reference + int". We need this because the // "reference + int" calculation involves a cast back to the original type, which makes it // not an lvalue. 
- if ((op == EOpAddAssign || op == EOpSubAssign) && left->isReference() && - extensionRequested(E_GL_EXT_buffer_reference2)) { - + if ((op == EOpAddAssign || op == EOpSubAssign) && left->isReference()) { if (!(right->getType().isScalar() && right->getType().isIntegerDomain())) return nullptr; @@ -338,7 +337,8 @@ TIntermTyped* TIntermediate::addAssign(TOperator op, TIntermTyped* left, TInterm // Returns the added node. // The caller should set the type of the returned node. // -TIntermTyped* TIntermediate::addIndex(TOperator op, TIntermTyped* base, TIntermTyped* index, TSourceLoc loc) +TIntermTyped* TIntermediate::addIndex(TOperator op, TIntermTyped* base, TIntermTyped* index, + const TSourceLoc& loc) { // caller should set the type return addBinaryNode(op, base, index, loc); @@ -349,7 +349,8 @@ TIntermTyped* TIntermediate::addIndex(TOperator op, TIntermTyped* base, TIntermT // // Returns the added node. // -TIntermTyped* TIntermediate::addUnaryMath(TOperator op, TIntermTyped* child, TSourceLoc loc) +TIntermTyped* TIntermediate::addUnaryMath(TOperator op, TIntermTyped* child, + const TSourceLoc& loc) { if (child == 0) return nullptr; @@ -495,7 +496,8 @@ TIntermTyped* TIntermediate::addBuiltInFunctionCall(const TSourceLoc& loc, TOper // Returns an aggregate node, which could be the one passed in if // it was already an aggregate. // -TIntermTyped* TIntermediate::setAggregateOperator(TIntermNode* node, TOperator op, const TType& type, TSourceLoc loc) +TIntermTyped* TIntermediate::setAggregateOperator(TIntermNode* node, TOperator op, const TType& type, + const TSourceLoc& loc) { TIntermAggregate* aggNode; @@ -510,8 +512,6 @@ TIntermTyped* TIntermediate::setAggregateOperator(TIntermNode* node, TOperator o // aggNode = new TIntermAggregate(); aggNode->getSequence().push_back(node); - if (loc.line == 0) - loc = node->getLoc(); } } else aggNode = new TIntermAggregate(); @@ -520,8 +520,8 @@ TIntermTyped* TIntermediate::setAggregateOperator(TIntermNode* node, TOperator o // Set the operator. // aggNode->setOperator(op); - if (loc.line != 0) - aggNode->setLoc(loc); + if (loc.line != 0 || node != nullptr) + aggNode->setLoc(loc.line != 0 ? loc : node->getLoc()); aggNode->setType(type); @@ -538,7 +538,7 @@ bool TIntermediate::isConversionAllowed(TOperator op, TIntermTyped* node) const return false; case EbtAtomicUint: case EbtSampler: - case EbtAccStructNV: + case EbtAccStruct: // opaque types can be passed to functions if (op == EOpFunction) break; @@ -819,22 +819,25 @@ TIntermTyped* TIntermediate::createConversion(TBasicType convertTo, TIntermTyped node->getBasicType() == EbtFloat || node->getBasicType() == EbtDouble); - if (! getArithemeticInt8Enabled()) { - if (((convertTo == EbtInt8 || convertTo == EbtUint8) && ! convertFromIntTypes) || - ((node->getBasicType() == EbtInt8 || node->getBasicType() == EbtUint8) && ! convertToIntTypes)) + if (((convertTo == EbtInt8 || convertTo == EbtUint8) && ! convertFromIntTypes) || + ((node->getBasicType() == EbtInt8 || node->getBasicType() == EbtUint8) && ! convertToIntTypes)) { + if (! getArithemeticInt8Enabled()) { return nullptr; + } } - if (! getArithemeticInt16Enabled()) { - if (((convertTo == EbtInt16 || convertTo == EbtUint16) && ! convertFromIntTypes) || - ((node->getBasicType() == EbtInt16 || node->getBasicType() == EbtUint16) && ! convertToIntTypes)) + if (((convertTo == EbtInt16 || convertTo == EbtUint16) && ! convertFromIntTypes) || + ((node->getBasicType() == EbtInt16 || node->getBasicType() == EbtUint16) && ! convertToIntTypes)) { + if (! 
getArithemeticInt16Enabled()) { return nullptr; + } } - if (! getArithemeticFloat16Enabled()) { - if ((convertTo == EbtFloat16 && ! convertFromFloatTypes) || - (node->getBasicType() == EbtFloat16 && ! convertToFloatTypes)) + if ((convertTo == EbtFloat16 && ! convertFromFloatTypes) || + (node->getBasicType() == EbtFloat16 && ! convertToFloatTypes)) { + if (! getArithemeticFloat16Enabled()) { return nullptr; + } } #endif @@ -887,7 +890,7 @@ TIntermTyped* TIntermediate::addConversion(TBasicType convertTo, TIntermTyped* n // Returns the converted pair of nodes. // Returns when there is no conversion. std::tuple -TIntermediate::addConversion(TOperator op, TIntermTyped* node0, TIntermTyped* node1) +TIntermediate::addPairConversion(TOperator op, TIntermTyped* node0, TIntermTyped* node1) { if (!isConversionAllowed(op, node0) || !isConversionAllowed(op, node1)) return std::make_tuple(nullptr, nullptr); @@ -940,7 +943,7 @@ TIntermediate::addConversion(TOperator op, TIntermTyped* node0, TIntermTyped* no if (node0->getBasicType() == node1->getBasicType()) return std::make_tuple(node0, node1); - promoteTo = getConversionDestinatonType(node0->getBasicType(), node1->getBasicType(), op); + promoteTo = getConversionDestinationType(node0->getBasicType(), node1->getBasicType(), op); if (std::get<0>(promoteTo) == EbtNumTypes || std::get<1>(promoteTo) == EbtNumTypes) return std::make_tuple(nullptr, nullptr); @@ -1040,64 +1043,30 @@ TIntermTyped* TIntermediate::addConversion(TOperator op, const TType& type, TInt // Note: callers are responsible for other aspects of shape, // like vector and matrix sizes. - TBasicType promoteTo; - // GL_EXT_shader_16bit_storage can't do OpConstantComposite with - // 16-bit types, so disable promotion for those types. - bool canPromoteConstant = true; - switch (op) { // // Explicit conversions (unary operations) // case EOpConstructBool: - promoteTo = EbtBool; - break; case EOpConstructFloat: - promoteTo = EbtFloat; - break; case EOpConstructInt: - promoteTo = EbtInt; - break; case EOpConstructUint: - promoteTo = EbtUint; - break; #ifndef GLSLANG_WEB case EOpConstructDouble: - promoteTo = EbtDouble; - break; case EOpConstructFloat16: - promoteTo = EbtFloat16; - canPromoteConstant = extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_float16); - break; case EOpConstructInt8: - promoteTo = EbtInt8; - canPromoteConstant = extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int8); - break; case EOpConstructUint8: - promoteTo = EbtUint8; - canPromoteConstant = extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int8); - break; case EOpConstructInt16: - promoteTo = EbtInt16; - canPromoteConstant = extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int16); - break; case EOpConstructUint16: - promoteTo = EbtUint16; - canPromoteConstant = extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int16); - break; case EOpConstructInt64: - promoteTo = EbtInt64; - break; case EOpConstructUint64: - promoteTo = EbtUint64; break; + #endif + // + // Implicit conversions + // case EOpLogicalNot: case EOpFunctionCall: @@ -1152,9 +1121,7 @@ TIntermTyped* TIntermediate::addConversion(TOperator op, const TType& type, TInt if 
(type.getBasicType() == node->getType().getBasicType()) return node; - if (canImplicitlyPromote(node->getBasicType(), type.getBasicType(), op)) - promoteTo = type.getBasicType(); - else + if (! canImplicitlyPromote(node->getBasicType(), type.getBasicType(), op)) return nullptr; break; @@ -1164,9 +1131,7 @@ TIntermTyped* TIntermediate::addConversion(TOperator op, const TType& type, TInt case EOpLeftShiftAssign: case EOpRightShiftAssign: { - if (getSource() == EShSourceHlsl && node->getType().getBasicType() == EbtBool) - promoteTo = type.getBasicType(); - else { + if (!(getSource() == EShSourceHlsl && node->getType().getBasicType() == EbtBool)) { if (isTypeInt(type.getBasicType()) && isTypeInt(node->getBasicType())) return node; else @@ -1184,13 +1149,44 @@ TIntermTyped* TIntermediate::addConversion(TOperator op, const TType& type, TInt return nullptr; } + bool canPromoteConstant = true; +#ifndef GLSLANG_WEB + // GL_EXT_shader_16bit_storage can't do OpConstantComposite with + // 16-bit types, so disable promotion for those types. + // Many issues with this, from JohnK: + // - this isn't really right to discuss SPIR-V here + // - this could easily be entirely about scalars, so is overstepping + // - we should be looking at what the shader asked for, and saying whether or + // not it can be done, in the parser, by calling requireExtensions(), not + // changing language sementics on the fly by asking what extensions are in use + // - at the time of this writing (14-Aug-2020), no test results are changed by this. + switch (op) { + case EOpConstructFloat16: + canPromoteConstant = numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_float16); + break; + case EOpConstructInt8: + case EOpConstructUint8: + canPromoteConstant = numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_int8); + break; + case EOpConstructInt16: + case EOpConstructUint16: + canPromoteConstant = numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_int16); + break; + default: + break; + } +#endif + if (canPromoteConstant && node->getAsConstantUnion()) - return promoteConstantUnion(promoteTo, node->getAsConstantUnion()); + return promoteConstantUnion(type.getBasicType(), node->getAsConstantUnion()); // // Add a new newNode for the conversion. 
// - TIntermTyped* newNode = createConversion(promoteTo, node); + TIntermTyped* newNode = createConversion(type.getBasicType(), node); return newNode; } @@ -1620,7 +1616,7 @@ bool TIntermediate::isFPIntegralConversion(TBasicType from, TBasicType to) const // bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperator op) const { - if (isEsProfile() || version == 110) + if ((isEsProfile() && version < 310 ) || version == 110) return false; if (from == to) @@ -1659,46 +1655,51 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat } } - bool explicitTypesEnabled = extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int8) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int16) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int32) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int64) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_float16) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_float32) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_float64); - - if (explicitTypesEnabled) { - // integral promotions - if (isIntegralPromotion(from, to)) { + if (getSource() == EShSourceHlsl) { + // HLSL + if (from == EbtBool && (to == EbtInt || to == EbtUint || to == EbtFloat)) return true; - } + } else { + // GLSL + if (isIntegralPromotion(from, to) || + isFPPromotion(from, to) || + isIntegralConversion(from, to) || + isFPConversion(from, to) || + isFPIntegralConversion(from, to)) { - // floating-point promotions - if (isFPPromotion(from, to)) { - return true; - } - - // integral conversions - if (isIntegralConversion(from, to)) { - return true; - } - - // floating-point conversions - if (isFPConversion(from, to)) { - return true; - } - - // floating-integral conversions - if (isFPIntegralConversion(from, to)) { - return true; - } - - // hlsl supported conversions - if (getSource() == EShSourceHlsl) { - if (from == EbtBool && (to == EbtInt || to == EbtUint || to == EbtFloat)) + if (numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_int8) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_int16) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_int32) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_int64) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_float16) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_float32) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_float64)) { return true; + } } + } + + if (isEsProfile()) { + switch (to) { + case EbtFloat: + switch (from) { + case EbtInt: + case EbtUint: + return numericFeatures.contains(TNumericFeatures::shader_implicit_conversions); + default: + return false; + } + case EbtUint: + switch (from) { + case EbtInt: + return numericFeatures.contains(TNumericFeatures::shader_implicit_conversions); + default: + return false; + } + default: + return false; + } } else { switch (to) { case EbtDouble: @@ -1708,13 +1709,14 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat case EbtInt64: case EbtUint64: case EbtFloat: - case EbtDouble: - return true; + return version >= 400 || 
numericFeatures.contains(TNumericFeatures::gpu_shader_fp64); case EbtInt16: case EbtUint16: - return extensionRequested(E_GL_AMD_gpu_shader_int16); + return (version >= 400 || numericFeatures.contains(TNumericFeatures::gpu_shader_fp64)) && + numericFeatures.contains(TNumericFeatures::gpu_shader_int16); case EbtFloat16: - return extensionRequested(E_GL_AMD_gpu_shader_half_float); + return (version >= 400 || numericFeatures.contains(TNumericFeatures::gpu_shader_fp64)) && + numericFeatures.contains(TNumericFeatures::gpu_shader_half_float); default: return false; } @@ -1722,16 +1724,14 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat switch (from) { case EbtInt: case EbtUint: - case EbtFloat: return true; case EbtBool: return getSource() == EShSourceHlsl; case EbtInt16: case EbtUint16: - return extensionRequested(E_GL_AMD_gpu_shader_int16); + return numericFeatures.contains(TNumericFeatures::gpu_shader_int16); case EbtFloat16: - return - extensionRequested(E_GL_AMD_gpu_shader_half_float) || + return numericFeatures.contains(TNumericFeatures::gpu_shader_half_float) || getSource() == EShSourceHlsl; default: return false; @@ -1739,25 +1739,21 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat case EbtUint: switch (from) { case EbtInt: - return version >= 400 || getSource() == EShSourceHlsl; - case EbtUint: - return true; + return version >= 400 || getSource() == EShSourceHlsl; case EbtBool: return getSource() == EShSourceHlsl; case EbtInt16: case EbtUint16: - return extensionRequested(E_GL_AMD_gpu_shader_int16); + return numericFeatures.contains(TNumericFeatures::gpu_shader_int16); default: return false; } case EbtInt: switch (from) { - case EbtInt: - return true; case EbtBool: return getSource() == EShSourceHlsl; case EbtInt16: - return extensionRequested(E_GL_AMD_gpu_shader_int16); + return numericFeatures.contains(TNumericFeatures::gpu_shader_int16); default: return false; } @@ -1766,21 +1762,19 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat case EbtInt: case EbtUint: case EbtInt64: - case EbtUint64: return true; case EbtInt16: case EbtUint16: - return extensionRequested(E_GL_AMD_gpu_shader_int16); + return numericFeatures.contains(TNumericFeatures::gpu_shader_int16); default: return false; } case EbtInt64: switch (from) { case EbtInt: - case EbtInt64: return true; case EbtInt16: - return extensionRequested(E_GL_AMD_gpu_shader_int16); + return numericFeatures.contains(TNumericFeatures::gpu_shader_int16); default: return false; } @@ -1788,9 +1782,7 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat switch (from) { case EbtInt16: case EbtUint16: - return extensionRequested(E_GL_AMD_gpu_shader_int16); - case EbtFloat16: - return extensionRequested(E_GL_AMD_gpu_shader_half_float); + return numericFeatures.contains(TNumericFeatures::gpu_shader_int16); default: break; } @@ -1798,8 +1790,7 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat case EbtUint16: switch (from) { case EbtInt16: - case EbtUint16: - return extensionRequested(E_GL_AMD_gpu_shader_int16); + return numericFeatures.contains(TNumericFeatures::gpu_shader_int16); default: break; } @@ -1926,12 +1917,14 @@ static TBasicType getCorrespondingUnsignedType(TBasicType type) // integer type corresponding to the type of the operand with signed // integer type. 
-std::tuple TIntermediate::getConversionDestinatonType(TBasicType type0, TBasicType type1, TOperator op) const +std::tuple TIntermediate::getConversionDestinationType(TBasicType type0, TBasicType type1, TOperator op) const { TBasicType res0 = EbtNumTypes; TBasicType res1 = EbtNumTypes; - if (isEsProfile() || version == 110) + if ((isEsProfile() && + (version < 310 || !numericFeatures.contains(TNumericFeatures::shader_implicit_conversions))) || + version == 110) return std::make_tuple(res0, res1); if (getSource() == EShSourceHlsl) { @@ -2463,7 +2456,7 @@ TIntermTyped* TIntermediate::addSelection(TIntermTyped* cond, TIntermTyped* true // // Get compatible types. // - auto children = addConversion(EOpSequence, trueBlock, falseBlock); + auto children = addPairConversion(EOpSequence, trueBlock, falseBlock); trueBlock = std::get<0>(children); falseBlock = std::get<1>(children); @@ -2750,6 +2743,22 @@ TIntermBranch* TIntermediate::addBranch(TOperator branchOp, TIntermTyped* expres return node; } +// Propagate precision from formal function return type to actual return type, +// and on to its subtree. +void TIntermBranch::updatePrecision(TPrecisionQualifier parentPrecision) +{ + TIntermTyped* exp = getExpression(); + if (exp == nullptr) + return; + + if (exp->getBasicType() == EbtInt || exp->getBasicType() == EbtUint || + exp->getBasicType() == EbtFloat || exp->getBasicType() == EbtFloat16) { + if (parentPrecision != EpqNone && exp->getQualifier().precision == EpqNone) { + exp->propagatePrecision(parentPrecision); + } + } +} + // // This is to be executed after the final root is put on top by the parsing // process. @@ -2774,6 +2783,9 @@ bool TIntermediate::postProcess(TIntermNode* root, EShLanguage /*language*/) case EShTexSampTransUpgradeTextureRemoveSampler: performTextureUpgradeAndSamplerRemovalTransformation(root); break; + case EShTexSampTransCount: + assert(0); + break; } #endif @@ -3234,10 +3246,17 @@ bool TIntermediate::promoteUnary(TIntermUnary& node) return false; break; - default: - if (operand->getBasicType() != EbtFloat) + // HLSL uses this path for initial function signature finding for built-ins + // taking a single argument, which generally don't participate in + // operator-based type promotion (type conversion will occur later). + // For now, scalar argument cases are relying on the setType() call below. + if (getSource() == EShSourceHlsl) + break; + // GLSL only allows integer arguments for the cases identified above in the + // case statements. + if (operand->getBasicType() != EbtFloat) return false; } @@ -3247,9 +3266,11 @@ bool TIntermediate::promoteUnary(TIntermUnary& node) return true; } +// Propagate precision qualifiers *up* from children to parent. void TIntermUnary::updatePrecision() { - if (getBasicType() == EbtInt || getBasicType() == EbtUint || getBasicType() == EbtFloat || getBasicType() == EbtFloat16) { + if (getBasicType() == EbtInt || getBasicType() == EbtUint || + getBasicType() == EbtFloat || getBasicType() == EbtFloat16) { if (operand->getQualifier().precision > getQualifier().precision) getQualifier().precision = operand->getQualifier().precision; } @@ -3745,9 +3766,12 @@ bool TIntermediate::promoteAggregate(TIntermAggregate& node) return false; } +// Propagate precision qualifiers *up* from children to parent, and then +// back *down* again to the children's subtrees. 
void TIntermBinary::updatePrecision() { - if (getBasicType() == EbtInt || getBasicType() == EbtUint || getBasicType() == EbtFloat || getBasicType() == EbtFloat16) { + if (getBasicType() == EbtInt || getBasicType() == EbtUint || + getBasicType() == EbtFloat || getBasicType() == EbtFloat16) { getQualifier().precision = std::max(right->getQualifier().precision, left->getQualifier().precision); if (getQualifier().precision != EpqNone) { left->propagatePrecision(getQualifier().precision); @@ -3756,9 +3780,14 @@ void TIntermBinary::updatePrecision() } } +// Recursively propagate precision qualifiers *down* the subtree of the current node, +// until reaching a node that already has a precision qualifier or otherwise does +// not participate in precision propagation. void TIntermTyped::propagatePrecision(TPrecisionQualifier newPrecision) { - if (getQualifier().precision != EpqNone || (getBasicType() != EbtInt && getBasicType() != EbtUint && getBasicType() != EbtFloat && getBasicType() != EbtFloat16)) + if (getQualifier().precision != EpqNone || + (getBasicType() != EbtInt && getBasicType() != EbtUint && + getBasicType() != EbtFloat && getBasicType() != EbtFloat16)) return; getQualifier().precision = newPrecision; diff --git a/thirdparty/glslang/glslang/MachineIndependent/LiveTraverser.h b/thirdparty/glslang/glslang/MachineIndependent/LiveTraverser.h index 7333bc964e4..9b39b5983f5 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/LiveTraverser.h +++ b/thirdparty/glslang/glslang/MachineIndependent/LiveTraverser.h @@ -74,14 +74,33 @@ public: for (unsigned int f = 0; f < globals.size(); ++f) { TIntermAggregate* candidate = globals[f]->getAsAggregate(); if (candidate && candidate->getOp() == EOpFunction && candidate->getName() == name) { - functions.push_back(candidate); + destinations.push_back(candidate); break; } } } - typedef std::list TFunctionStack; - TFunctionStack functions; + void pushGlobalReference(const TString& name) + { + TIntermSequence& globals = intermediate.getTreeRoot()->getAsAggregate()->getSequence(); + for (unsigned int f = 0; f < globals.size(); ++f) { + TIntermAggregate* candidate = globals[f]->getAsAggregate(); + if (candidate && candidate->getOp() == EOpSequence && + candidate->getSequence().size() == 1 && + candidate->getSequence()[0]->getAsBinaryNode()) { + TIntermBinary* binary = candidate->getSequence()[0]->getAsBinaryNode(); + TIntermSymbol* symbol = binary->getLeft()->getAsSymbolNode(); + if (symbol && symbol->getQualifier().storage == EvqGlobal && + symbol->getName() == name) { + destinations.push_back(candidate); + break; + } + } + } + } + + typedef std::list TDestinationStack; + TDestinationStack destinations; protected: // To catch which function calls are not dead, and hence which functions must be visited. @@ -117,16 +136,27 @@ protected: // and only visit each function once. 
void addFunctionCall(TIntermAggregate* call) { - // // just use the map to ensure we process each function at most once + // just use the map to ensure we process each function at most once if (liveFunctions.find(call->getName()) == liveFunctions.end()) { liveFunctions.insert(call->getName()); pushFunction(call->getName()); } } + void addGlobalReference(const TString& name) + { + // just use the map to ensure we process each global at most once + if (liveGlobals.find(name) == liveGlobals.end()) { + liveGlobals.insert(name); + pushGlobalReference(name); + } + } + const TIntermediate& intermediate; typedef std::unordered_set TLiveFunctions; TLiveFunctions liveFunctions; + typedef std::unordered_set TLiveGlobals; + TLiveGlobals liveGlobals; bool traverseAll; private: diff --git a/thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp b/thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp index 282ecca0e07..b46400914c8 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp @@ -157,11 +157,11 @@ bool TParseContextBase::lValueErrorCheck(const TSourceLoc& loc, const char* op, case EvqBuffer: if (node->getQualifier().isReadOnly()) message = "can't modify a readonly buffer"; - if (node->getQualifier().isShaderRecordNV()) + if (node->getQualifier().isShaderRecord()) message = "can't modify a shaderrecordnv qualified buffer"; break; - case EvqHitAttrNV: - if (language != EShLangIntersectNV) + case EvqHitAttr: + if (language != EShLangIntersect) message = "cannot modify hitAttributeNV in this stage"; break; #endif @@ -181,9 +181,12 @@ bool TParseContextBase::lValueErrorCheck(const TSourceLoc& loc, const char* op, case EbtAtomicUint: message = "can't modify an atomic_uint"; break; - case EbtAccStructNV: + case EbtAccStruct: message = "can't modify accelerationStructureNV"; break; + case EbtRayQuery: + message = "can't modify rayQueryEXT"; + break; #endif default: break; diff --git a/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp b/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp index a2224e1609c..86a5a378661 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp @@ -3,6 +3,7 @@ // Copyright (C) 2012-2015 LunarG, Inc. // Copyright (C) 2015-2018 Google, Inc. // Copyright (C) 2017, 2019 ARM Limited. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. // @@ -245,7 +246,9 @@ void TParseContext::handlePragma(const TSourceLoc& loc, const TVector& else if (tokens[2].compare("off") == 0) contextPragma.optimize = false; else { - error(loc, "\"on\" or \"off\" expected after '(' for 'optimize' pragma", "#pragma", ""); + if(relaxedErrors()) + // If an implementation does not recognize the tokens following #pragma, then it will ignore that pragma. + warn(loc, "\"on\" or \"off\" expected after '(' for 'optimize' pragma", "#pragma", ""); return; } @@ -269,7 +272,9 @@ void TParseContext::handlePragma(const TSourceLoc& loc, const TVector& else if (tokens[2].compare("off") == 0) contextPragma.debug = false; else { - error(loc, "\"on\" or \"off\" expected after '(' for 'debug' pragma", "#pragma", ""); + if(relaxedErrors()) + // If an implementation does not recognize the tokens following #pragma, then it will ignore that pragma. 
+ warn(loc, "\"on\" or \"off\" expected after '(' for 'debug' pragma", "#pragma", ""); return; } @@ -427,8 +432,18 @@ TIntermTyped* TParseContext::handleBracketDereference(const TSourceLoc& loc, TIn #ifndef GLSLANG_WEB if (base->isReference() && ! base->isArray()) { requireExtensions(loc, 1, &E_GL_EXT_buffer_reference2, "buffer reference indexing"); - result = intermediate.addBinaryMath(EOpAdd, base, index, loc); - result->setType(base->getType()); + if (base->getType().getReferentType()->containsUnsizedArray()) { + error(loc, "cannot index reference to buffer containing an unsized array", "", ""); + result = nullptr; + } else { + result = intermediate.addBinaryMath(EOpAdd, base, index, loc); + if (result != nullptr) + result->setType(base->getType()); + } + if (result == nullptr) { + error(loc, "cannot index buffer reference", "", ""); + result = intermediate.addConstantUnion(0.0, EbtFloat, loc); + } return result; } if (base->getAsSymbolNode() && isIoResizeArray(base->getType())) @@ -740,8 +755,11 @@ TIntermTyped* TParseContext::handleBinaryMath(const TSourceLoc& loc, const char* } TIntermTyped* result = nullptr; - if (allowed) + if (allowed) { + if ((left->isReference() || right->isReference())) + requireExtensions(loc, 1, &E_GL_EXT_buffer_reference2, "buffer reference math"); result = intermediate.addBinaryMath(op, left, right, loc); + } if (result == nullptr) binaryOpError(loc, str, left->getCompleteString(), right->getCompleteString()); @@ -821,50 +839,7 @@ TIntermTyped* TParseContext::handleDotDereference(const TSourceLoc& loc, TInterm TIntermTyped* result = base; if ((base->isVector() || base->isScalar()) && (base->isFloatingDomain() || base->isIntegerDomain() || base->getBasicType() == EbtBool)) { - if (base->isScalar()) { - const char* dotFeature = "scalar swizzle"; - requireProfile(loc, ~EEsProfile, dotFeature); - profileRequires(loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, dotFeature); - } - - TSwizzleSelectors selectors; - parseSwizzleSelector(loc, field, base->getVectorSize(), selectors); - - if (base->isVector() && selectors.size() != 1 && base->getType().contains16BitFloat()) - requireFloat16Arithmetic(loc, ".", "can't swizzle types containing float16"); - if (base->isVector() && selectors.size() != 1 && base->getType().contains16BitInt()) - requireInt16Arithmetic(loc, ".", "can't swizzle types containing (u)int16"); - if (base->isVector() && selectors.size() != 1 && base->getType().contains8BitInt()) - requireInt8Arithmetic(loc, ".", "can't swizzle types containing (u)int8"); - - if (base->isScalar()) { - if (selectors.size() == 1) - return result; - else { - TType type(base->getBasicType(), EvqTemporary, selectors.size()); - // Swizzle operations propagate specialization-constantness - if (base->getQualifier().isSpecConstant()) - type.getQualifier().makeSpecConstant(); - return addConstructor(loc, base, type); - } - } - - if (base->getType().getQualifier().isFrontEndConstant()) - result = intermediate.foldSwizzle(base, selectors, loc); - else { - if (selectors.size() == 1) { - TIntermTyped* index = intermediate.addConstantUnion(selectors[0], loc); - result = intermediate.addIndex(EOpIndexDirect, base, index, loc); - result->setType(TType(base->getBasicType(), EvqTemporary, base->getType().getQualifier().precision)); - } else { - TIntermTyped* index = intermediate.addSwizzle(selectors, loc); - result = intermediate.addIndex(EOpVectorSwizzle, base, index, loc); - result->setType(TType(base->getBasicType(), EvqTemporary, base->getType().getQualifier().precision, 
selectors.size())); - } - // Swizzle operations propagate specialization-constantness - if (base->getType().getQualifier().isSpecConstant()) - result->getWritableType().getQualifier().makeSpecConstant(); - } + result = handleDotSwizzle(loc, base, field); } else if (base->isStruct() || base->isReference()) { const TTypeList* fields = base->isReference() ? base->getType().getReferentType()->getStruct() : @@ -905,6 +880,60 @@ TIntermTyped* TParseContext::handleDotDereference(const TSourceLoc& loc, TInterm return result; } +// +// Handle seeing a base.swizzle, a subset of base.identifier in the grammar. +// +TIntermTyped* TParseContext::handleDotSwizzle(const TSourceLoc& loc, TIntermTyped* base, const TString& field) +{ + TIntermTyped* result = base; + if (base->isScalar()) { + const char* dotFeature = "scalar swizzle"; + requireProfile(loc, ~EEsProfile, dotFeature); + profileRequires(loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, dotFeature); + } + + TSwizzleSelectors selectors; + parseSwizzleSelector(loc, field, base->getVectorSize(), selectors); + + if (base->isVector() && selectors.size() != 1 && base->getType().contains16BitFloat()) + requireFloat16Arithmetic(loc, ".", "can't swizzle types containing float16"); + if (base->isVector() && selectors.size() != 1 && base->getType().contains16BitInt()) + requireInt16Arithmetic(loc, ".", "can't swizzle types containing (u)int16"); + if (base->isVector() && selectors.size() != 1 && base->getType().contains8BitInt()) + requireInt8Arithmetic(loc, ".", "can't swizzle types containing (u)int8"); + + if (base->isScalar()) { + if (selectors.size() == 1) + return result; + else { + TType type(base->getBasicType(), EvqTemporary, selectors.size()); + // Swizzle operations propagate specialization-constantness + if (base->getQualifier().isSpecConstant()) + type.getQualifier().makeSpecConstant(); + return addConstructor(loc, base, type); + } + } + + if (base->getType().getQualifier().isFrontEndConstant()) + result = intermediate.foldSwizzle(base, selectors, loc); + else { + if (selectors.size() == 1) { + TIntermTyped* index = intermediate.addConstantUnion(selectors[0], loc); + result = intermediate.addIndex(EOpIndexDirect, base, index, loc); + result->setType(TType(base->getBasicType(), EvqTemporary, base->getType().getQualifier().precision)); + } else { + TIntermTyped* index = intermediate.addSwizzle(selectors, loc); + result = intermediate.addIndex(EOpVectorSwizzle, base, index, loc); + result->setType(TType(base->getBasicType(), EvqTemporary, base->getType().getQualifier().precision, selectors.size())); + } + // Swizzle operations propagate specialization-constantness + if (base->getType().getQualifier().isSpecConstant()) + result->getWritableType().getQualifier().makeSpecConstant(); + } + + return result; +} + void TParseContext::blockMemberExtensionCheck(const TSourceLoc& loc, const TIntermTyped* base, int member, const TString& memberName) { // a block that needs extension checking is either 'base', or if arrayed, @@ -1273,7 +1302,7 @@ TIntermTyped* TParseContext::handleBuiltInFunctionCall(TSourceLoc loc, TIntermNo TIntermTyped *result = intermediate.addBuiltInFunctionCall(loc, function.getBuiltInOp(), function.getParamCount() == 1, arguments, function.getType()); - if (obeyPrecisionQualifiers()) + if (result != nullptr && obeyPrecisionQualifiers()) computeBuiltinPrecisions(*result, function); if (result == nullptr) { @@ -1353,6 +1382,9 @@ void TParseContext::computeBuiltinPrecisions(TIntermTyped& node, const TFunction case 
EOpInterpolateAtSample: numArgs = 1; break; + case EOpDebugPrintf: + numArgs = 0; + break; default: break; } @@ -1390,23 +1422,28 @@ TIntermNode* TParseContext::handleReturnValue(const TSourceLoc& loc, TIntermType #endif functionReturnsValue = true; + TIntermBranch* branch = nullptr; if (currentFunctionType->getBasicType() == EbtVoid) { error(loc, "void function cannot return a value", "return", ""); - return intermediate.addBranch(EOpReturn, loc); + branch = intermediate.addBranch(EOpReturn, loc); } else if (*currentFunctionType != value->getType()) { TIntermTyped* converted = intermediate.addConversion(EOpReturn, *currentFunctionType, value); if (converted) { if (*currentFunctionType != converted->getType()) error(loc, "cannot convert return value to function return type", "return", ""); if (version < 420) - warn(loc, "type conversion on return values was not explicitly allowed until version 420", "return", ""); - return intermediate.addBranch(EOpReturn, converted, loc); + warn(loc, "type conversion on return values was not explicitly allowed until version 420", + "return", ""); + branch = intermediate.addBranch(EOpReturn, converted, loc); } else { error(loc, "type does not match, or is not convertible to, the function's return type", "return", ""); - return intermediate.addBranch(EOpReturn, value, loc); + branch = intermediate.addBranch(EOpReturn, value, loc); } } else - return intermediate.addBranch(EOpReturn, value, loc); + branch = intermediate.addBranch(EOpReturn, value, loc); + + branch->updatePrecision(currentFunctionType->getQualifier().precision); + return branch; } // See if the operation is being done in an illegal location. @@ -1650,6 +1687,14 @@ TIntermTyped* TParseContext::addOutputArgumentConversions(const TFunction& funct #endif } +TIntermTyped* TParseContext::addAssign(const TSourceLoc& loc, TOperator op, TIntermTyped* left, TIntermTyped* right) +{ + if ((op == EOpAddAssign || op == EOpSubAssign) && left->isReference()) + requireExtensions(loc, 1, &E_GL_EXT_buffer_reference2, "+= and -= on a buffer reference"); + + return intermediate.addAssign(op, left, right, loc); +} + void TParseContext::memorySemanticsCheck(const TSourceLoc& loc, const TFunction& fnCandidate, const TIntermOperator& callNode) { const TIntermSequence* argp = &callNode.getAsAggregate()->getSequence(); @@ -1672,6 +1717,9 @@ void TParseContext::memorySemanticsCheck(const TSourceLoc& loc, const TFunction& unsigned int semantics = 0, storageClassSemantics = 0; unsigned int semantics2 = 0, storageClassSemantics2 = 0; + const TIntermTyped* arg0 = (*argp)[0]->getAsTyped(); + const bool isMS = arg0->getBasicType() == EbtSampler && arg0->getType().getSampler().isMultiSample(); + // Grab the semantics and storage class semantics from the operands, based on opcode switch (callNode.getOp()) { case EOpAtomicAdd: @@ -1704,18 +1752,18 @@ void TParseContext::memorySemanticsCheck(const TSourceLoc& loc, const TFunction& case EOpImageAtomicXor: case EOpImageAtomicExchange: case EOpImageAtomicStore: - storageClassSemantics = (*argp)[4]->getAsConstantUnion()->getConstArray()[0].getIConst(); - semantics = (*argp)[5]->getAsConstantUnion()->getConstArray()[0].getIConst(); + storageClassSemantics = (*argp)[isMS ? 5 : 4]->getAsConstantUnion()->getConstArray()[0].getIConst(); + semantics = (*argp)[isMS ? 
6 : 5]->getAsConstantUnion()->getConstArray()[0].getIConst(); break; case EOpImageAtomicLoad: - storageClassSemantics = (*argp)[3]->getAsConstantUnion()->getConstArray()[0].getIConst(); - semantics = (*argp)[4]->getAsConstantUnion()->getConstArray()[0].getIConst(); + storageClassSemantics = (*argp)[isMS ? 4 : 3]->getAsConstantUnion()->getConstArray()[0].getIConst(); + semantics = (*argp)[isMS ? 5 : 4]->getAsConstantUnion()->getConstArray()[0].getIConst(); break; case EOpImageAtomicCompSwap: - storageClassSemantics = (*argp)[5]->getAsConstantUnion()->getConstArray()[0].getIConst(); - semantics = (*argp)[6]->getAsConstantUnion()->getConstArray()[0].getIConst(); - storageClassSemantics2 = (*argp)[7]->getAsConstantUnion()->getConstArray()[0].getIConst(); - semantics2 = (*argp)[8]->getAsConstantUnion()->getConstArray()[0].getIConst(); + storageClassSemantics = (*argp)[isMS ? 6 : 5]->getAsConstantUnion()->getConstArray()[0].getIConst(); + semantics = (*argp)[isMS ? 7 : 6]->getAsConstantUnion()->getConstArray()[0].getIConst(); + storageClassSemantics2 = (*argp)[isMS ? 8 : 7]->getAsConstantUnion()->getConstArray()[0].getIConst(); + semantics2 = (*argp)[isMS ? 9 : 8]->getAsConstantUnion()->getConstArray()[0].getIConst(); break; case EOpBarrier: @@ -2006,18 +2054,20 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan if (arg > 0) { #ifndef GLSLANG_WEB - bool f16ShadowCompare = (*argp)[1]->getAsTyped()->getBasicType() == EbtFloat16 && arg0->getType().getSampler().shadow; + bool f16ShadowCompare = (*argp)[1]->getAsTyped()->getBasicType() == EbtFloat16 && + arg0->getType().getSampler().shadow; if (f16ShadowCompare) ++arg; #endif - if (! (*argp)[arg]->getAsConstantUnion()) + if (! (*argp)[arg]->getAsTyped()->getQualifier().isConstant()) error(loc, "argument must be compile-time constant", "texel offset", ""); - else { + else if ((*argp)[arg]->getAsConstantUnion()) { const TType& type = (*argp)[arg]->getAsTyped()->getType(); for (int c = 0; c < type.getVectorSize(); ++c) { int offset = (*argp)[arg]->getAsConstantUnion()->getConstArray()[c].getIConst(); if (offset > resources.maxProgramTexelOffset || offset < resources.minProgramTexelOffset) - error(loc, "value is out of range:", "texel offset", "[gl_MinProgramTexelOffset, gl_MaxProgramTexelOffset]"); + error(loc, "value is out of range:", "texel offset", + "[gl_MinProgramTexelOffset, gl_MaxProgramTexelOffset]"); } } } @@ -2026,15 +2076,32 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan } #ifndef GLSLANG_WEB - case EOpTraceNV: + case EOpTrace: if (!(*argp)[10]->getAsConstantUnion()) error(loc, "argument must be compile-time constant", "payload number", ""); break; - case EOpExecuteCallableNV: + case EOpExecuteCallable: if (!(*argp)[1]->getAsConstantUnion()) error(loc, "argument must be compile-time constant", "callable data number", ""); break; + case EOpRayQueryGetIntersectionType: + case EOpRayQueryGetIntersectionT: + case EOpRayQueryGetIntersectionInstanceCustomIndex: + case EOpRayQueryGetIntersectionInstanceId: + case EOpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffset: + case EOpRayQueryGetIntersectionGeometryIndex: + case EOpRayQueryGetIntersectionPrimitiveIndex: + case EOpRayQueryGetIntersectionBarycentrics: + case EOpRayQueryGetIntersectionFrontFace: + case EOpRayQueryGetIntersectionObjectRayDirection: + case EOpRayQueryGetIntersectionObjectRayOrigin: + case EOpRayQueryGetIntersectionObjectToWorld: + case EOpRayQueryGetIntersectionWorldToObject: + if 
(!(*argp)[1]->getAsConstantUnion()) + error(loc, "argument must be compile-time constant", "committed", ""); + break; + case EOpTextureQuerySamples: case EOpImageQuerySamples: // GL_ARB_shader_texture_image_samples @@ -2058,7 +2125,14 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan if (imageType.getQualifier().getFormat() != ElfR32i && imageType.getQualifier().getFormat() != ElfR32ui) error(loc, "only supported on image with format r32i or r32ui", fnCandidate.getName().c_str(), ""); } else { - if (fnCandidate.getName().compare(0, 19, "imageAtomicExchange") != 0) + bool isImageAtomicOnFloatAllowed = ((fnCandidate.getName().compare(0, 14, "imageAtomicAdd") == 0) || + (fnCandidate.getName().compare(0, 15, "imageAtomicLoad") == 0) || + (fnCandidate.getName().compare(0, 16, "imageAtomicStore") == 0) || + (fnCandidate.getName().compare(0, 19, "imageAtomicExchange") == 0)); + if (imageType.getSampler().type == EbtFloat && isImageAtomicOnFloatAllowed && + (fnCandidate.getName().compare(0, 19, "imageAtomicExchange") != 0)) // imageAtomicExchange doesn't require GL_EXT_shader_atomic_float + requireExtensions(loc, 1, &E_GL_EXT_shader_atomic_float, fnCandidate.getName().c_str()); + if (!isImageAtomicOnFloatAllowed) error(loc, "only supported on integer images", fnCandidate.getName().c_str(), ""); else if (imageType.getQualifier().getFormat() != ElfR32f && isEsProfile()) error(loc, "only supported on image with format r32f", fnCandidate.getName().c_str(), ""); @@ -2087,10 +2161,18 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan if (argp->size() > 3) { requireExtensions(loc, 1, &E_GL_KHR_memory_scope_semantics, fnCandidate.getName().c_str()); memorySemanticsCheck(loc, fnCandidate, callNode); + if ((callNode.getOp() == EOpAtomicAdd || callNode.getOp() == EOpAtomicExchange || + callNode.getOp() == EOpAtomicLoad || callNode.getOp() == EOpAtomicStore) && + (arg0->getType().isFloatingDomain())) { + requireExtensions(loc, 1, &E_GL_EXT_shader_atomic_float, fnCandidate.getName().c_str()); + } } else if (arg0->getType().getBasicType() == EbtInt64 || arg0->getType().getBasicType() == EbtUint64) { const char* const extensions[2] = { E_GL_NV_shader_atomic_int64, E_GL_EXT_shader_atomic_int64 }; requireExtensions(loc, 2, extensions, fnCandidate.getName().c_str()); + } else if ((callNode.getOp() == EOpAtomicAdd || callNode.getOp() == EOpAtomicExchange) && + (arg0->getType().isFloatingDomain())) { + requireExtensions(loc, 1, &E_GL_EXT_shader_atomic_float, fnCandidate.getName().c_str()); } break; } @@ -2172,6 +2254,28 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan memorySemanticsCheck(loc, fnCandidate, callNode); } break; + + case EOpMix: + if (profile == EEsProfile && version < 310) { + // Look for specific signatures + if ((*argp)[0]->getAsTyped()->getBasicType() != EbtFloat && + (*argp)[1]->getAsTyped()->getBasicType() != EbtFloat && + (*argp)[2]->getAsTyped()->getBasicType() == EbtBool) { + requireExtensions(loc, 1, &E_GL_EXT_shader_integer_mix, "specific signature of builtin mix"); + } + } + + if (profile != EEsProfile && version < 450) { + if ((*argp)[0]->getAsTyped()->getBasicType() != EbtFloat && + (*argp)[0]->getAsTyped()->getBasicType() != EbtDouble && + (*argp)[1]->getAsTyped()->getBasicType() != EbtFloat && + (*argp)[1]->getAsTyped()->getBasicType() != EbtDouble && + (*argp)[2]->getAsTyped()->getBasicType() == EbtBool) { + requireExtensions(loc, 1, &E_GL_EXT_shader_integer_mix, 
fnCandidate.getName().c_str()); + } + } + + break; #endif default: @@ -2698,7 +2802,10 @@ void TParseContext::reservedPpErrorCheck(const TSourceLoc& loc, const char* iden if (strncmp(identifier, "GL_", 3) == 0) ppError(loc, "names beginning with \"GL_\" can't be (un)defined:", op, identifier); else if (strncmp(identifier, "defined", 8) == 0) - ppError(loc, "\"defined\" can't be (un)defined:", op, identifier); + if (relaxedErrors()) + ppWarn(loc, "\"defined\" is (un)defined:", op, identifier); + else + ppError(loc, "\"defined\" can't be (un)defined:", op, identifier); else if (strstr(identifier, "__") != 0) { if (isEsProfile() && version >= 300 && (strcmp(identifier, "__LINE__") == 0 || @@ -2706,7 +2813,7 @@ void TParseContext::reservedPpErrorCheck(const TSourceLoc& loc, const char* iden strcmp(identifier, "__VERSION__") == 0)) ppError(loc, "predefined names can't be (un)defined:", op, identifier); else { - if (isEsProfile() && version < 300) + if (isEsProfile() && version < 300 && !relaxedErrors()) ppError(loc, "names containing consecutive underscores are reserved, and an error if version < 300:", op, identifier); else ppWarn(loc, "names containing consecutive underscores are reserved:", op, identifier); @@ -3083,7 +3190,7 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T error(loc, "constructor argument does not have a type", "constructor", ""); return true; } - if (op != EOpConstructStruct && typed->getBasicType() == EbtSampler) { + if (op != EOpConstructStruct && op != EOpConstructNonuniform && typed->getBasicType() == EbtSampler) { error(loc, "cannot convert a sampler", "constructor", ""); return true; } @@ -3128,7 +3235,7 @@ bool TParseContext::constructorTextureSamplerError(const TSourceLoc& loc, const if (function[0].type->getBasicType() != EbtSampler || ! function[0].type->getSampler().isTexture() || function[0].type->isArray()) { - error(loc, "sampler-constructor first argument must be a scalar textureXXX type", token, ""); + error(loc, "sampler-constructor first argument must be a scalar *texture* type", token, ""); return true; } // simulate the first argument's impact on the result type, so it can be compared with the encapsulated operator!=() @@ -3136,7 +3243,8 @@ bool TParseContext::constructorTextureSamplerError(const TSourceLoc& loc, const texture.setCombined(false); texture.shadow = false; if (texture != function[0].type->getSampler()) { - error(loc, "sampler-constructor first argument must match type and dimensionality of constructor type", token, ""); + error(loc, "sampler-constructor first argument must be a *texture* type" + " matching the dimensionality and sampled type of the constructor", token, ""); return true; } @@ -3146,7 +3254,7 @@ bool TParseContext::constructorTextureSamplerError(const TSourceLoc& loc, const if ( function[1].type->getBasicType() != EbtSampler || ! 
function[1].type->getSampler().isPureSampler() || function[1].type->isArray()) { - error(loc, "sampler-constructor second argument must be a scalar type 'sampler'", token, ""); + error(loc, "sampler-constructor second argument must be a scalar sampler or samplerShadow", token, ""); return true; } @@ -3222,14 +3330,14 @@ void TParseContext::atomicUintCheck(const TSourceLoc& loc, const TType& type, co error(loc, "atomic_uints can only be used in uniform variables or function parameters:", type.getBasicTypeString().c_str(), identifier.c_str()); } -void TParseContext::accStructNVCheck(const TSourceLoc& loc, const TType& type, const TString& identifier) +void TParseContext::accStructCheck(const TSourceLoc& loc, const TType& type, const TString& identifier) { if (type.getQualifier().storage == EvqUniform) return; - if (type.getBasicType() == EbtStruct && containsFieldWithBasicType(type, EbtAccStructNV)) + if (type.getBasicType() == EbtStruct && containsFieldWithBasicType(type, EbtAccStruct)) error(loc, "non-uniform struct contains an accelerationStructureNV:", type.getBasicTypeString().c_str(), identifier.c_str()); - else if (type.getBasicType() == EbtAccStructNV && type.getQualifier().storage != EvqUniform) + else if (type.getBasicType() == EbtAccStruct && type.getQualifier().storage != EvqUniform) error(loc, "accelerationStructureNV can only be used in uniform variables or function parameters:", type.getBasicTypeString().c_str(), identifier.c_str()); @@ -3327,6 +3435,11 @@ void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQuali !qualifier.hasBufferReference()) error(loc, "buffers can be declared only as blocks", "buffer", ""); + if (qualifier.storage != EvqVaryingIn && publicType.basicType == EbtDouble && + extensionTurnedOn(E_GL_ARB_vertex_attrib_64bit) && language == EShLangVertex && + version < 400) { + profileRequires(loc, ECoreProfile | ECompatibilityProfile, 410, E_GL_ARB_gpu_shader_fp64, "vertex-shader `double` type"); + } if (qualifier.storage != EvqVaryingIn && qualifier.storage != EvqVaryingOut) return; @@ -3377,7 +3490,7 @@ void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQuali profileRequires(loc, ENoProfile, 150, nullptr, "vertex input arrays"); } if (publicType.basicType == EbtDouble) - profileRequires(loc, ~EEsProfile, 410, nullptr, "vertex-shader `double` type input"); + profileRequires(loc, ~EEsProfile, 410, E_GL_ARB_vertex_attrib_64bit, "vertex-shader `double` type input"); if (qualifier.isAuxiliary() || qualifier.isInterpolation() || qualifier.isMemory() || qualifier.invariant) error(loc, "vertex input cannot be further qualified", "", ""); break; @@ -3513,12 +3626,14 @@ void TParseContext::mergeQualifiers(const TSourceLoc& loc, TQualifier& dst, cons dst.precision = src.precision; #ifndef GLSLANG_WEB - if (!force && ((src.coherent && (dst.devicecoherent || dst.queuefamilycoherent || dst.workgroupcoherent || dst.subgroupcoherent)) || - (src.devicecoherent && (dst.coherent || dst.queuefamilycoherent || dst.workgroupcoherent || dst.subgroupcoherent)) || - (src.queuefamilycoherent && (dst.coherent || dst.devicecoherent || dst.workgroupcoherent || dst.subgroupcoherent)) || - (src.workgroupcoherent && (dst.coherent || dst.devicecoherent || dst.queuefamilycoherent || dst.subgroupcoherent)) || - (src.subgroupcoherent && (dst.coherent || dst.devicecoherent || dst.queuefamilycoherent || dst.workgroupcoherent)))) { - error(loc, "only one coherent/devicecoherent/queuefamilycoherent/workgroupcoherent/subgroupcoherent qualifier allowed", 
GetPrecisionQualifierString(src.precision), ""); + if (!force && ((src.coherent && (dst.devicecoherent || dst.queuefamilycoherent || dst.workgroupcoherent || dst.subgroupcoherent || dst.shadercallcoherent)) || + (src.devicecoherent && (dst.coherent || dst.queuefamilycoherent || dst.workgroupcoherent || dst.subgroupcoherent || dst.shadercallcoherent)) || + (src.queuefamilycoherent && (dst.coherent || dst.devicecoherent || dst.workgroupcoherent || dst.subgroupcoherent || dst.shadercallcoherent)) || + (src.workgroupcoherent && (dst.coherent || dst.devicecoherent || dst.queuefamilycoherent || dst.subgroupcoherent || dst.shadercallcoherent)) || + (src.subgroupcoherent && (dst.coherent || dst.devicecoherent || dst.queuefamilycoherent || dst.workgroupcoherent || dst.shadercallcoherent)) || + (src.shadercallcoherent && (dst.coherent || dst.devicecoherent || dst.queuefamilycoherent || dst.workgroupcoherent || dst.subgroupcoherent)))) { + error(loc, "only one coherent/devicecoherent/queuefamilycoherent/workgroupcoherent/subgroupcoherent/shadercallcoherent qualifier allowed", + GetPrecisionQualifierString(src.precision), ""); } #endif // Layout qualifiers @@ -3546,6 +3661,7 @@ void TParseContext::mergeQualifiers(const TSourceLoc& loc, TQualifier& dst, cons MERGE_SINGLETON(queuefamilycoherent); MERGE_SINGLETON(workgroupcoherent); MERGE_SINGLETON(subgroupcoherent); + MERGE_SINGLETON(shadercallcoherent); MERGE_SINGLETON(nonprivate); MERGE_SINGLETON(volatil); MERGE_SINGLETON(restrict); @@ -3983,7 +4099,7 @@ void TParseContext::checkRuntimeSizable(const TSourceLoc& loc, const TIntermType } // check for additional things allowed by GL_EXT_nonuniform_qualifier - if (base.getBasicType() == EbtSampler || base.getBasicType() == EbtAccStructNV || + if (base.getBasicType() == EbtSampler || base.getBasicType() == EbtAccStruct || base.getBasicType() == EbtRayQuery || (base.getBasicType() == EbtBlock && base.getType().getQualifier().isUniformOrBuffer())) requireExtensions(loc, 1, &E_GL_EXT_nonuniform_qualifier, "variable index"); else @@ -4487,6 +4603,7 @@ void TParseContext::paramCheckFix(const TSourceLoc& loc, const TQualifier& quali type.getQualifier().queuefamilycoherent = qualifier.queuefamilycoherent; type.getQualifier().workgroupcoherent = qualifier.workgroupcoherent; type.getQualifier().subgroupcoherent = qualifier.subgroupcoherent; + type.getQualifier().shadercallcoherent = qualifier.shadercallcoherent; type.getQualifier().nonprivate = qualifier.nonprivate; type.getQualifier().readonly = qualifier.readonly; type.getQualifier().writeonly = qualifier.writeonly; @@ -4868,7 +4985,7 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi #ifndef GLSLANG_WEB if (id == TQualifier::getLayoutPackingString(ElpStd430)) { requireProfile(loc, EEsProfile | ECoreProfile | ECompatibilityProfile, "std430"); - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 430, nullptr, "std430"); + profileRequires(loc, ECoreProfile | ECompatibilityProfile, 430, E_GL_ARB_shader_storage_buffer_object, "std430"); profileRequires(loc, EEsProfile, 310, nullptr, "std430"); publicType.qualifier.layoutPacking = ElpStd430; return; @@ -5067,13 +5184,19 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi return; } } else { - if (language == EShLangRayGenNV || language == EShLangIntersectNV || - language == EShLangAnyHitNV || language == EShLangClosestHitNV || - language == EShLangMissNV || language == EShLangCallableNV) { - if (id == "shaderrecordnv") { - 
publicType.qualifier.layoutShaderRecordNV = true; + if (language == EShLangRayGen || language == EShLangIntersect || + language == EShLangAnyHit || language == EShLangClosestHit || + language == EShLangMiss || language == EShLangCallable) { + if (id == "shaderrecordnv" || id == "shaderrecordext") { + if (id == "shaderrecordnv") { + requireExtensions(loc, 1, &E_GL_NV_ray_tracing, "shader record NV"); + } else { + requireExtensions(loc, 1, &E_GL_EXT_ray_tracing, "shader record EXT"); + } + publicType.qualifier.layoutShaderRecord = true; return; } + } } if (language == EShLangCompute) { @@ -5088,6 +5211,12 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi } } } + + if (id == "primitive_culling") { + requireExtensions(loc, 1, &E_GL_EXT_ray_flags_primitive_culling, "primitive culling"); + publicType.shaderQualifiers.layoutPrimitiveCulling = true; + return; + } #endif error(loc, "unrecognized layout identifier, or qualifier requires assignment (e.g., binding = 4)", id.c_str(), ""); @@ -5135,6 +5264,7 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi profileRequires(loc, EEsProfile, 310, nullptr, feature); } publicType.qualifier.layoutOffset = value; + publicType.qualifier.explicitOffset = true; if (nonLiteral) error(loc, "needs a literal integer", "offset", ""); return; @@ -5347,10 +5477,10 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi case EShLangFragment: if (id == "index") { - requireProfile(loc, ECompatibilityProfile | ECoreProfile, "index layout qualifier on fragment output"); + requireProfile(loc, ECompatibilityProfile | ECoreProfile | EEsProfile, "index layout qualifier on fragment output"); const char* exts[2] = { E_GL_ARB_separate_shader_objects, E_GL_ARB_explicit_attrib_location }; profileRequires(loc, ECompatibilityProfile | ECoreProfile, 330, 2, exts, "index layout qualifier on fragment output"); - + profileRequires(loc, EEsProfile ,310, E_GL_EXT_blend_func_extended, "index layout qualifier on fragment output"); // "It is also a compile-time error if a fragment shader sets a layout index to less than 0 or greater than 1." 
if (value < 0 || value > 1) { value = 0; @@ -5514,8 +5644,8 @@ void TParseContext::mergeObjectLayoutQualifiers(TQualifier& dst, const TQualifie dst.layoutViewportRelative = true; if (src.layoutSecondaryViewportRelativeOffset != -2048) dst.layoutSecondaryViewportRelativeOffset = src.layoutSecondaryViewportRelativeOffset; - if (src.layoutShaderRecordNV) - dst.layoutShaderRecordNV = true; + if (src.layoutShaderRecord) + dst.layoutShaderRecord = true; if (src.pervertexNV) dst.pervertexNV = true; #endif @@ -5583,7 +5713,7 @@ void TParseContext::layoutObjectCheck(const TSourceLoc& loc, const TSymbol& symb error(loc, "cannot specify on a variable declaration", "align", ""); if (qualifier.isPushConstant()) error(loc, "can only specify on a uniform block", "push_constant", ""); - if (qualifier.isShaderRecordNV()) + if (qualifier.isShaderRecord()) error(loc, "can only specify on a buffer block", "shaderRecordNV", ""); } break; @@ -5657,11 +5787,11 @@ void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type) error(loc, "cannot apply to uniform or buffer block", "location", ""); break; #ifndef GLSLANG_WEB - case EvqPayloadNV: - case EvqPayloadInNV: - case EvqHitAttrNV: - case EvqCallableDataNV: - case EvqCallableDataInNV: + case EvqPayload: + case EvqPayloadIn: + case EvqHitAttr: + case EvqCallableData: + case EvqCallableDataIn: break; #endif default: @@ -5684,6 +5814,8 @@ void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type) int repeated = intermediate.addXfbBufferOffset(type); if (repeated >= 0) error(loc, "overlapping offsets at", "xfb_offset", "offset %d in buffer %d", repeated, qualifier.layoutXfbBuffer); + if (type.isUnsizedArray()) + error(loc, "unsized array", "xfb_offset", "in buffer %d", qualifier.layoutXfbBuffer); // "The offset must be a multiple of the size of the first component of the first // qualified variable or block member, or a compile-time error results. 
Further, if applied to an aggregate @@ -5756,7 +5888,7 @@ void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type) if (spvVersion.spv > 0) { if (qualifier.isUniformOrBuffer()) { if (type.getBasicType() == EbtBlock && !qualifier.isPushConstant() && - !qualifier.isShaderRecordNV() && + !qualifier.isShaderRecord() && !qualifier.hasAttachment() && !qualifier.hasBufferReference()) error(loc, "uniform/buffer blocks require layout(binding=X)", "binding", ""); @@ -5813,7 +5945,7 @@ void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type) if (qualifier.hasBufferReference() && type.getBasicType() != EbtBlock) error(loc, "can only be used with a block", "buffer_reference", ""); - if (qualifier.isShaderRecordNV() && type.getBasicType() != EbtBlock) + if (qualifier.isShaderRecord() && type.getBasicType() != EbtBlock) error(loc, "can only be used with a block", "shaderRecordNV", ""); // input attachment @@ -5958,7 +6090,7 @@ void TParseContext::layoutQualifierCheck(const TSourceLoc& loc, const TQualifier if (qualifier.storage != EvqBuffer) error(loc, "can only be used with buffer", "buffer_reference", ""); } - if (qualifier.isShaderRecordNV()) { + if (qualifier.isShaderRecord()) { if (qualifier.storage != EvqBuffer) error(loc, "can only be used with a buffer", "shaderRecordNV", ""); if (qualifier.hasBinding()) @@ -5967,7 +6099,7 @@ void TParseContext::layoutQualifierCheck(const TSourceLoc& loc, const TQualifier error(loc, "cannot be used with shaderRecordNV", "set", ""); } - if (qualifier.storage == EvqHitAttrNV && qualifier.hasLayout()) { + if (qualifier.storage == EvqHitAttr && qualifier.hasLayout()) { error(loc, "cannot apply layout qualifiers to hitAttributeNV variable", "hitAttributeNV", ""); } } @@ -6018,6 +6150,8 @@ void TParseContext::checkNoShaderLayouts(const TSourceLoc& loc, const TShaderQua error(loc, message, "num_views", ""); if (shaderQualifiers.interlockOrdering != EioNone) error(loc, message, TQualifier::getInterlockOrderingString(shaderQualifiers.interlockOrdering), ""); + if (shaderQualifiers.layoutPrimitiveCulling) + error(loc, "can only be applied as standalone", "primitive_culling", ""); #endif } @@ -6079,6 +6213,15 @@ const TFunction* TParseContext::findFunction(const TSourceLoc& loc, const TFunct #endif const TFunction* function = nullptr; + + // debugPrintfEXT has var args and is in the symbol table as "debugPrintfEXT()", + // mangled to "debugPrintfEXT(" + if (call.getName() == "debugPrintfEXT") { + TSymbol* symbol = symbolTable.find("debugPrintfEXT(", &builtIn); + if (symbol) + return symbol->getAsFunction(); + } + bool explicitTypesEnabled = extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) || extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_int8) || extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_int16) || @@ -6088,7 +6231,10 @@ const TFunction* TParseContext::findFunction(const TSourceLoc& loc, const TFunct extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_float32) || extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_float64); - if (isEsProfile() || version < 120) + if (isEsProfile()) + function = (extensionTurnedOn(E_GL_EXT_shader_implicit_conversions) && version >= 310) ? + findFunction120(loc, call, builtIn) : findFunctionExact(loc, call, builtIn); + else if (version < 120) function = findFunctionExact(loc, call, builtIn); else if (version < 400) function = extensionTurnedOn(E_GL_ARB_gpu_shader_fp64) ? 
findFunction400(loc, call, builtIn) : findFunction120(loc, call, builtIn); @@ -6422,6 +6568,12 @@ TIntermNode* TParseContext::declareVariable(const TSourceLoc& loc, TString& iden type.copyArrayInnerSizes(publicType.arraySizes); arrayOfArrayVersionCheck(loc, type.getArraySizes()); + if (initializer) { + if (type.getBasicType() == EbtRayQuery) { + error(loc, "ray queries can only be initialized by using the rayQueryInitializeEXT intrinsic:", "=", identifier.c_str()); + } + } + if (type.isCoopMat()) { intermediate.setUseVulkanMemoryModel(); intermediate.setUseStorageBuffer(); @@ -6461,7 +6613,7 @@ TIntermNode* TParseContext::declareVariable(const TSourceLoc& loc, TString& iden transparentOpaqueCheck(loc, type, identifier); #ifndef GLSLANG_WEB atomicUintCheck(loc, type, identifier); - accStructNVCheck(loc, type, identifier); + accStructCheck(loc, type, identifier); checkAndResizeMeshViewDim(loc, type, /*isBlockMember*/ false); #endif if (type.getQualifier().storage == EvqConst && type.containsReference()) { @@ -7177,6 +7329,8 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T if (!node->getType().isCoopMat()) { if (type.getBasicType() != node->getType().getBasicType()) { node = intermediate.addConversion(type.getBasicType(), node); + if (node == nullptr) + return nullptr; } node = intermediate.setAggregateOperator(node, EOpConstructCooperativeMatrix, type, node->getLoc()); } else { @@ -7355,8 +7509,8 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con arraySizesCheck(memberLoc, currentBlockQualifier, memberType.getArraySizes(), nullptr, member == typeList.size() - 1); if (memberQualifier.hasOffset()) { if (spvVersion.spv == 0) { - requireProfile(memberLoc, ~EEsProfile, "offset on block member"); - profileRequires(memberLoc, ~EEsProfile, 440, E_GL_ARB_enhanced_layouts, "offset on block member"); + profileRequires(memberLoc, ~EEsProfile, 440, E_GL_ARB_enhanced_layouts, "\"offset\" on block member"); + profileRequires(memberLoc, EEsProfile, 300, E_GL_ARB_enhanced_layouts, "\"offset\" on block member"); } } @@ -7395,7 +7549,7 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con // Special case for "push_constant uniform", which has a default of std430, // contrary to normal uniform defaults, and can't have a default tracked for it. if ((currentBlockQualifier.isPushConstant() && !currentBlockQualifier.hasPacking()) || - (currentBlockQualifier.isShaderRecordNV() && !currentBlockQualifier.hasPacking())) + (currentBlockQualifier.isShaderRecord() && !currentBlockQualifier.hasPacking())) currentBlockQualifier.layoutPacking = ElpStd430; // Special case for "taskNV in/out", which has a default of std430, @@ -7495,6 +7649,8 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con fixBlockLocations(loc, currentBlockQualifier, typeList, memberWithLocation, memberWithoutLocation); fixXfbOffsets(currentBlockQualifier, typeList); fixBlockUniformOffsets(currentBlockQualifier, typeList); + fixBlockUniformLayoutMatrix(currentBlockQualifier, &typeList, nullptr); + fixBlockUniformLayoutPacking(currentBlockQualifier, &typeList, nullptr); for (unsigned int member = 0; member < typeList.size(); ++member) layoutTypeCheck(typeList[member].loc, *typeList[member].type); @@ -7612,6 +7768,7 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con // with a particular stage. 
void TParseContext::blockStageIoCheck(const TSourceLoc& loc, const TQualifier& qualifier) { + const char *extsrt[2] = { E_GL_NV_ray_tracing, E_GL_EXT_ray_tracing }; switch (qualifier.storage) { case EvqUniform: profileRequires(loc, EEsProfile, 300, nullptr, "uniform block"); @@ -7621,7 +7778,7 @@ void TParseContext::blockStageIoCheck(const TSourceLoc& loc, const TQualifier& q break; case EvqBuffer: requireProfile(loc, EEsProfile | ECoreProfile | ECompatibilityProfile, "buffer block"); - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 430, nullptr, "buffer block"); + profileRequires(loc, ECoreProfile | ECompatibilityProfile, 430, E_GL_ARB_shader_storage_buffer_object, "buffer block"); profileRequires(loc, EEsProfile, 310, nullptr, "buffer block"); break; case EvqVaryingIn: @@ -7650,28 +7807,28 @@ void TParseContext::blockStageIoCheck(const TSourceLoc& loc, const TQualifier& q } break; #ifndef GLSLANG_WEB - case EvqPayloadNV: - profileRequires(loc, ~EEsProfile, 460, E_GL_NV_ray_tracing, "rayPayloadNV block"); - requireStage(loc, (EShLanguageMask)(EShLangRayGenNVMask | EShLangAnyHitNVMask | EShLangClosestHitNVMask | EShLangMissNVMask), + case EvqPayload: + profileRequires(loc, ~EEsProfile, 460, 2, extsrt, "rayPayloadNV block"); + requireStage(loc, (EShLanguageMask)(EShLangRayGenMask | EShLangAnyHitMask | EShLangClosestHitMask | EShLangMissMask), "rayPayloadNV block"); break; - case EvqPayloadInNV: - profileRequires(loc, ~EEsProfile, 460, E_GL_NV_ray_tracing, "rayPayloadInNV block"); - requireStage(loc, (EShLanguageMask)(EShLangAnyHitNVMask | EShLangClosestHitNVMask | EShLangMissNVMask), + case EvqPayloadIn: + profileRequires(loc, ~EEsProfile, 460, 2, extsrt, "rayPayloadInNV block"); + requireStage(loc, (EShLanguageMask)(EShLangAnyHitMask | EShLangClosestHitMask | EShLangMissMask), "rayPayloadInNV block"); break; - case EvqHitAttrNV: - profileRequires(loc, ~EEsProfile, 460, E_GL_NV_ray_tracing, "hitAttributeNV block"); - requireStage(loc, (EShLanguageMask)(EShLangIntersectNVMask | EShLangAnyHitNVMask | EShLangClosestHitNVMask), "hitAttributeNV block"); + case EvqHitAttr: + profileRequires(loc, ~EEsProfile, 460, 2, extsrt, "hitAttributeNV block"); + requireStage(loc, (EShLanguageMask)(EShLangIntersectMask | EShLangAnyHitMask | EShLangClosestHitMask), "hitAttributeNV block"); break; - case EvqCallableDataNV: - profileRequires(loc, ~EEsProfile, 460, E_GL_NV_ray_tracing, "callableDataNV block"); - requireStage(loc, (EShLanguageMask)(EShLangRayGenNVMask | EShLangClosestHitNVMask | EShLangMissNVMask | EShLangCallableNVMask), + case EvqCallableData: + profileRequires(loc, ~EEsProfile, 460, 2, extsrt, "callableDataNV block"); + requireStage(loc, (EShLanguageMask)(EShLangRayGenMask | EShLangClosestHitMask | EShLangMissMask | EShLangCallableMask), "callableDataNV block"); break; - case EvqCallableDataInNV: - profileRequires(loc, ~EEsProfile, 460, E_GL_NV_ray_tracing, "callableDataInNV block"); - requireStage(loc, (EShLanguageMask)(EShLangCallableNVMask), "callableDataInNV block"); + case EvqCallableDataIn: + profileRequires(loc, ~EEsProfile, 460, 2, extsrt, "callableDataInNV block"); + requireStage(loc, (EShLanguageMask)(EShLangCallableMask), "callableDataInNV block"); break; #endif default: @@ -7710,8 +7867,8 @@ void TParseContext::blockQualifierCheck(const TSourceLoc& loc, const TQualifier& error(loc, "cannot use invariant qualifier on an interface block", "invariant", ""); if (qualifier.isPushConstant()) intermediate.addPushConstantCount(); - if (qualifier.isShaderRecordNV()) - 
intermediate.addShaderRecordNVCount(); + if (qualifier.isShaderRecord()) + intermediate.addShaderRecordCount(); if (qualifier.isTaskMemory()) intermediate.addTaskNVCount(); } @@ -7864,6 +8021,101 @@ void TParseContext::fixBlockUniformOffsets(TQualifier& qualifier, TTypeList& typ } } +// +// Spread LayoutMatrix to uniform block member, if a uniform block member is a struct, +// we need spread LayoutMatrix to this struct member too. and keep this rule for recursive. +// +void TParseContext::fixBlockUniformLayoutMatrix(TQualifier& qualifier, TTypeList* originTypeList, + TTypeList* tmpTypeList) +{ + assert(tmpTypeList == nullptr || originTypeList->size() == tmpTypeList->size()); + for (unsigned int member = 0; member < originTypeList->size(); ++member) { + if (qualifier.layoutPacking != ElpNone) { + if (tmpTypeList == nullptr) { + if (((*originTypeList)[member].type->isMatrix() || + (*originTypeList)[member].type->getBasicType() == EbtStruct) && + (*originTypeList)[member].type->getQualifier().layoutMatrix == ElmNone) { + (*originTypeList)[member].type->getQualifier().layoutMatrix = qualifier.layoutMatrix; + } + } else { + if (((*tmpTypeList)[member].type->isMatrix() || + (*tmpTypeList)[member].type->getBasicType() == EbtStruct) && + (*tmpTypeList)[member].type->getQualifier().layoutMatrix == ElmNone) { + (*tmpTypeList)[member].type->getQualifier().layoutMatrix = qualifier.layoutMatrix; + } + } + } + + if ((*originTypeList)[member].type->getBasicType() == EbtStruct) { + TQualifier* memberQualifier = nullptr; + // block member can be declare a matrix style, so it should be update to the member's style + if ((*originTypeList)[member].type->getQualifier().layoutMatrix == ElmNone) { + memberQualifier = &qualifier; + } else { + memberQualifier = &((*originTypeList)[member].type->getQualifier()); + } + + const TType* tmpType = tmpTypeList == nullptr ? + (*originTypeList)[member].type->clone() : (*tmpTypeList)[member].type; + + fixBlockUniformLayoutMatrix(*memberQualifier, (*originTypeList)[member].type->getWritableStruct(), + tmpType->getWritableStruct()); + + const TTypeList* structure = recordStructCopy(matrixFixRecord, (*originTypeList)[member].type, tmpType); + + if (tmpTypeList == nullptr) { + (*originTypeList)[member].type->setStruct(const_cast(structure)); + } + if (tmpTypeList != nullptr) { + (*tmpTypeList)[member].type->setStruct(const_cast(structure)); + } + } + } +} + +// +// Spread LayoutPacking to block member, if a block member is a struct, we need spread LayoutPacking to +// this struct member too. and keep this rule for recursive. +// +void TParseContext::fixBlockUniformLayoutPacking(TQualifier& qualifier, TTypeList* originTypeList, + TTypeList* tmpTypeList) +{ + assert(tmpTypeList == nullptr || originTypeList->size() == tmpTypeList->size()); + for (unsigned int member = 0; member < originTypeList->size(); ++member) { + if (qualifier.layoutPacking != ElpNone) { + if (tmpTypeList == nullptr) { + if ((*originTypeList)[member].type->getQualifier().layoutPacking == ElpNone) { + (*originTypeList)[member].type->getQualifier().layoutPacking = qualifier.layoutPacking; + } + } else { + if ((*tmpTypeList)[member].type->getQualifier().layoutPacking == ElpNone) { + (*tmpTypeList)[member].type->getQualifier().layoutPacking = qualifier.layoutPacking; + } + } + } + + if ((*originTypeList)[member].type->getBasicType() == EbtStruct) { + // Deep copy the type in pool. + // Because, struct use in different block may have different layout qualifier. 
+ // We have to new a object to distinguish between them. + const TType* tmpType = tmpTypeList == nullptr ? + (*originTypeList)[member].type->clone() : (*tmpTypeList)[member].type; + + fixBlockUniformLayoutPacking(qualifier, (*originTypeList)[member].type->getWritableStruct(), + tmpType->getWritableStruct()); + + const TTypeList* structure = recordStructCopy(packingFixRecord, (*originTypeList)[member].type, tmpType); + + if (tmpTypeList == nullptr) { + (*originTypeList)[member].type->setStruct(const_cast(structure)); + } + if (tmpTypeList != nullptr) { + (*tmpTypeList)[member].type->setStruct(const_cast(structure)); + } + } + } +} + // For an identifier that is already declared, add more qualification to it. void TParseContext::addQualifierToExisting(const TSourceLoc& loc, TQualifier qualifier, const TString& identifier) { @@ -8172,6 +8424,16 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con { checkIoArraysConsistency(loc); } + + if (publicType.shaderQualifiers.layoutPrimitiveCulling) { + if (publicType.qualifier.storage != EvqTemporary) + error(loc, "layout qualifier can not have storage qualifiers", "primitive_culling","", ""); + else { + intermediate.setLayoutPrimitiveCulling(); + } + // Exit early as further checks are not valid + return; + } #endif const TQualifier& qualifier = publicType.qualifier; @@ -8233,7 +8495,7 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con error(loc, "cannot declare a default, can only be used on a block", "buffer_reference", ""); if (qualifier.hasSpecConstantId()) error(loc, "cannot declare a default, can only be used on a scalar", "constant_id", ""); - if (qualifier.isShaderRecordNV()) + if (qualifier.isShaderRecord()) error(loc, "cannot declare a default, can only be used on a block", "shaderRecordNV", ""); } @@ -8322,5 +8584,43 @@ TIntermNode* TParseContext::addSwitch(const TSourceLoc& loc, TIntermTyped* expre return switchNode; } +// +// When a struct used in block, and has it's own layout packing, layout matrix, +// record the origin structure of a struct to map, and Record the structure copy to the copy table, +// +const TTypeList* TParseContext::recordStructCopy(TStructRecord& record, const TType* originType, const TType* tmpType) +{ + size_t memberCount = tmpType->getStruct()->size(); + size_t originHash = 0, tmpHash = 0; + std::hash hasher; + for (size_t i = 0; i < memberCount; i++) { + size_t originMemberHash = hasher(originType->getStruct()->at(i).type->getQualifier().layoutPacking + + originType->getStruct()->at(i).type->getQualifier().layoutMatrix); + size_t tmpMemberHash = hasher(tmpType->getStruct()->at(i).type->getQualifier().layoutPacking + + tmpType->getStruct()->at(i).type->getQualifier().layoutMatrix); + originHash = hasher((originHash ^ originMemberHash) << 1); + tmpHash = hasher((tmpHash ^ tmpMemberHash) << 1); + } + const TTypeList* originStruct = originType->getStruct(); + const TTypeList* tmpStruct = tmpType->getStruct(); + if (originHash != tmpHash) { + auto fixRecords = record.find(originStruct); + if (fixRecords != record.end()) { + auto fixRecord = fixRecords->second.find(tmpHash); + if (fixRecord != fixRecords->second.end()) { + return fixRecord->second; + } else { + record[originStruct][tmpHash] = tmpStruct; + return tmpStruct; + } + } else { + record[originStruct] = std::map(); + record[originStruct][tmpHash] = tmpStruct; + return tmpStruct; + } + } + return originStruct; +} + } // end namespace glslang diff --git 
a/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.h b/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.h index 39363f1a2a7..0f09adaffcd 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.h +++ b/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.h @@ -68,6 +68,7 @@ class TScanContext; class TPpContext; typedef std::set TIdSetType; +typedef std::map> TStructRecord; // // Sharable code (as well as what's in TParseVersions) across @@ -83,6 +84,7 @@ public: scopeMangler("::"), symbolTable(symbolTable), statementNestingLevel(0), loopNestingLevel(0), structNestingLevel(0), controlFlowNestingLevel(0), + currentFunctionType(nullptr), postEntryPointReturn(false), contextPragma(true, false), beginInvocationInterlockCount(0), endInvocationInterlockCount(0), @@ -315,6 +317,7 @@ public: TIntermTyped* handleBinaryMath(const TSourceLoc&, const char* str, TOperator op, TIntermTyped* left, TIntermTyped* right); TIntermTyped* handleUnaryMath(const TSourceLoc&, const char* str, TOperator op, TIntermTyped* childNode); TIntermTyped* handleDotDereference(const TSourceLoc&, TIntermTyped* base, const TString& field); + TIntermTyped* handleDotSwizzle(const TSourceLoc&, TIntermTyped* base, const TString& field); void blockMemberExtensionCheck(const TSourceLoc&, const TIntermTyped* base, int member, const TString& memberName); TFunction* handleFunctionDeclarator(const TSourceLoc&, TFunction& function, bool prototype); TIntermAggregate* handleFunctionDefinition(const TSourceLoc&, TFunction&); @@ -326,6 +329,7 @@ public: TIntermTyped* handleLengthMethod(const TSourceLoc&, TFunction*, TIntermNode*); void addInputArgumentConversions(const TFunction&, TIntermNode*&) const; TIntermTyped* addOutputArgumentConversions(const TFunction&, TIntermAggregate&) const; + TIntermTyped* addAssign(const TSourceLoc&, TOperator op, TIntermTyped* left, TIntermTyped* right); void builtInOpCheck(const TSourceLoc&, const TFunction&, TIntermOperator&); void nonOpBuiltInCheck(const TSourceLoc&, const TFunction&, TIntermAggregate&); void userFunctionCallCheck(const TSourceLoc&, TIntermAggregate&); @@ -358,7 +362,7 @@ public: void boolCheck(const TSourceLoc&, const TPublicType&); void samplerCheck(const TSourceLoc&, const TType&, const TString& identifier, TIntermTyped* initializer); void atomicUintCheck(const TSourceLoc&, const TType&, const TString& identifier); - void accStructNVCheck(const TSourceLoc & loc, const TType & type, const TString & identifier); + void accStructCheck(const TSourceLoc & loc, const TType & type, const TString & identifier); void transparentOpaqueCheck(const TSourceLoc&, const TType&, const TString& identifier); void memberQualifierCheck(glslang::TPublicType&); void globalQualifierFixCheck(const TSourceLoc&, TQualifier&); @@ -417,12 +421,15 @@ public: void fixBlockLocations(const TSourceLoc&, TQualifier&, TTypeList&, bool memberWithLocation, bool memberWithoutLocation); void fixXfbOffsets(TQualifier&, TTypeList&); void fixBlockUniformOffsets(TQualifier&, TTypeList&); + void fixBlockUniformLayoutMatrix(TQualifier&, TTypeList*, TTypeList*); + void fixBlockUniformLayoutPacking(TQualifier&, TTypeList*, TTypeList*); void addQualifierToExisting(const TSourceLoc&, TQualifier, const TString& identifier); void addQualifierToExisting(const TSourceLoc&, TQualifier, TIdentifierList&); void invariantCheck(const TSourceLoc&, const TQualifier&); void updateStandaloneQualifierDefaults(const TSourceLoc&, const TPublicType&); void wrapupSwitchSubsequence(TIntermAggregate* statements, 
TIntermNode* branchNode); TIntermNode* addSwitch(const TSourceLoc&, TIntermTyped* expression, TIntermAggregate* body); + const TTypeList* recordStructCopy(TStructRecord&, const TType*, const TType*); #ifndef GLSLANG_WEB TAttributeType attributeFromName(const TString& name) const; @@ -483,6 +490,8 @@ protected: bool anyIndexLimits; TIdSetType inductiveLoopIds; TVector needsIndexLimitationChecking; + TStructRecord matrixFixRecord; + TStructRecord packingFixRecord; // // Geometry shader input arrays: diff --git a/thirdparty/glslang/glslang/MachineIndependent/Scan.cpp b/thirdparty/glslang/glslang/MachineIndependent/Scan.cpp index fd18fd4d7d9..bd3181a74f7 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/Scan.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/Scan.cpp @@ -2,6 +2,8 @@ // Copyright (C) 2002-2005 3Dlabs Inc. Ltd. // Copyright (C) 2013 LunarG, Inc. // Copyright (C) 2017 ARM Limited. +// Copyright (C) 2020 Google, Inc. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. // @@ -187,17 +189,15 @@ bool TInputScanner::scanVersion(int& version, EProfile& profile, bool& notFirstT if (lookingInMiddle) { notFirstToken = true; // make forward progress by finishing off the current line plus extra new lines - if (peek() == '\n' || peek() == '\r') { - while (peek() == '\n' || peek() == '\r') - get(); - } else + if (peek() != '\n' && peek() != '\r') { do { c = get(); } while (c != EndOfInput && c != '\n' && c != '\r'); - while (peek() == '\n' || peek() == '\r') - get(); - if (peek() == EndOfInput) - return true; + } + while (peek() == '\n' || peek() == '\r') + get(); + if (peek() == EndOfInput) + return true; } lookingInMiddle = true; @@ -416,6 +416,7 @@ void TScanContext::fillInKeywordMap() (*KeywordMap)["queuefamilycoherent"] = QUEUEFAMILYCOHERENT; (*KeywordMap)["workgroupcoherent"] = WORKGROUPCOHERENT; (*KeywordMap)["subgroupcoherent"] = SUBGROUPCOHERENT; + (*KeywordMap)["shadercallcoherent"] = SHADERCALLCOHERENT; (*KeywordMap)["nonprivate"] = NONPRIVATE; (*KeywordMap)["restrict"] = RESTRICT; (*KeywordMap)["readonly"] = READONLY; @@ -704,11 +705,18 @@ void TScanContext::fillInKeywordMap() (*KeywordMap)["precise"] = PRECISE; (*KeywordMap)["rayPayloadNV"] = PAYLOADNV; + (*KeywordMap)["rayPayloadEXT"] = PAYLOADEXT; (*KeywordMap)["rayPayloadInNV"] = PAYLOADINNV; + (*KeywordMap)["rayPayloadInEXT"] = PAYLOADINEXT; (*KeywordMap)["hitAttributeNV"] = HITATTRNV; + (*KeywordMap)["hitAttributeEXT"] = HITATTREXT; (*KeywordMap)["callableDataNV"] = CALLDATANV; + (*KeywordMap)["callableDataEXT"] = CALLDATAEXT; (*KeywordMap)["callableDataInNV"] = CALLDATAINNV; + (*KeywordMap)["callableDataInEXT"] = CALLDATAINEXT; (*KeywordMap)["accelerationStructureNV"] = ACCSTRUCTNV; + (*KeywordMap)["accelerationStructureEXT"] = ACCSTRUCTEXT; + (*KeywordMap)["rayQueryEXT"] = RAYQUERYEXT; (*KeywordMap)["perprimitiveNV"] = PERPRIMITIVENV; (*KeywordMap)["perviewNV"] = PERVIEWNV; (*KeywordMap)["taskNV"] = PERTASKNV; @@ -843,6 +851,7 @@ int TScanContext::tokenize(TPpContext* pp, TParserToken& token) parseContext.error(loc, "not supported", "::", ""); break; + case PpAtomConstString: parserToken->sType.lex.string = NewPoolTString(tokenText); return STRING_LITERAL; case PpAtomConstInt: parserToken->sType.lex.i = ppToken.ival; return INTCONSTANT; case PpAtomConstUint: parserToken->sType.lex.i = ppToken.ival; return UINTCONSTANT; case PpAtomConstFloat: parserToken->sType.lex.d = ppToken.dval; return FLOATCONSTANT; @@ -908,7 +917,8 @@ int 
TScanContext::tokenizeIdentifier() case BUFFER: afterBuffer = true; if ((parseContext.isEsProfile() && parseContext.version < 310) || - (!parseContext.isEsProfile() && parseContext.version < 430)) + (!parseContext.isEsProfile() && (parseContext.version < 430 && + !parseContext.extensionTurnedOn(E_GL_ARB_shader_storage_buffer_object)))) return identifierOrType(); return keyword; @@ -1014,6 +1024,23 @@ int TScanContext::tokenizeIdentifier() parseContext.extensionTurnedOn(E_GL_NV_ray_tracing)) return keyword; return identifierOrType(); + case PAYLOADEXT: + case PAYLOADINEXT: + case HITATTREXT: + case CALLDATAEXT: + case CALLDATAINEXT: + case ACCSTRUCTEXT: + if (parseContext.symbolTable.atBuiltInLevel() || + parseContext.extensionTurnedOn(E_GL_EXT_ray_tracing) || + parseContext.extensionTurnedOn(E_GL_EXT_ray_query)) + return keyword; + return identifierOrType(); + case RAYQUERYEXT: + if (parseContext.symbolTable.atBuiltInLevel() || + (!parseContext.isEsProfile() && parseContext.version >= 460 + && parseContext.extensionTurnedOn(E_GL_EXT_ray_query))) + return keyword; + return identifierOrType(); case ATOMIC_UINT: if ((parseContext.isEsProfile() && parseContext.version >= 310) || parseContext.extensionTurnedOn(E_GL_ARB_shader_atomic_counters)) @@ -1025,6 +1052,7 @@ int TScanContext::tokenizeIdentifier() case QUEUEFAMILYCOHERENT: case WORKGROUPCOHERENT: case SUBGROUPCOHERENT: + case SHADERCALLCOHERENT: case NONPRIVATE: case RESTRICT: case READONLY: @@ -1168,8 +1196,8 @@ int TScanContext::tokenizeIdentifier() afterType = true; if (parseContext.isEsProfile() || parseContext.version < 150 || (!parseContext.symbolTable.atBuiltInLevel() && - parseContext.version < 400 && - !parseContext.extensionTurnedOn(E_GL_ARB_gpu_shader_fp64))) + (parseContext.version < 400 && !parseContext.extensionTurnedOn(E_GL_ARB_gpu_shader_fp64) && + (parseContext.version < 410 && !parseContext.extensionTurnedOn(E_GL_ARB_vertex_attrib_64bit))))) reservedWord(); return keyword; @@ -1746,7 +1774,9 @@ int TScanContext::dMat() if (!parseContext.isEsProfile() && (parseContext.version >= 400 || parseContext.symbolTable.atBuiltInLevel() || - (parseContext.version >= 150 && parseContext.extensionTurnedOn(E_GL_ARB_gpu_shader_fp64)))) + (parseContext.version >= 150 && parseContext.extensionTurnedOn(E_GL_ARB_gpu_shader_fp64)) || + (parseContext.version >= 150 && parseContext.extensionTurnedOn(E_GL_ARB_vertex_attrib_64bit) + && parseContext.language == EShLangVertex))) return keyword; if (parseContext.isForwardCompatible()) diff --git a/thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp b/thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp index 44ce1c19d1a..9d7f37b0981 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp @@ -1,7 +1,7 @@ // // Copyright (C) 2002-2005 3Dlabs Inc. Ltd. // Copyright (C) 2013-2016 LunarG, Inc. -// Copyright (C) 2015-2018 Google, Inc. +// Copyright (C) 2015-2020 Google, Inc. // // All rights reserved. // @@ -51,9 +51,9 @@ #include "ScanContext.h" #ifdef ENABLE_HLSL -#include "../../hlsl/hlslParseHelper.h" -#include "../../hlsl/hlslParseables.h" -#include "../../hlsl/hlslScanContext.h" +#include "../HLSL/hlslParseHelper.h" +#include "../HLSL/hlslParseables.h" +#include "../HLSL/hlslScanContext.h" #endif #include "../Include/ShHandle.h" @@ -72,6 +72,9 @@ // token to print ", but none of that seems appropriate for this file. 
#include "preprocessor/PpTokens.h" +// Build-time generated includes +#include "glslang/build_info.h" + namespace { // anonymous namespace for file-local functions and symbols // Total number of successful initializers of glslang: a refcount @@ -291,6 +294,9 @@ void InitializeStageSymbolTable(TBuiltInParseables& builtInParseables, int versi #ifdef GLSLANG_WEB profile = EEsProfile; version = 310; +#elif defined(GLSLANG_ANGLE) + profile = ECoreProfile; + version = 450; #endif (*symbolTables[language]).adoptLevels(*commonTable[CommonIndex(profile, language)]); @@ -312,6 +318,9 @@ bool InitializeSymbolTables(TInfoSink& infoSink, TSymbolTable** commonTable, TS #ifdef GLSLANG_WEB profile = EEsProfile; version = 310; +#elif defined(GLSLANG_ANGLE) + profile = ECoreProfile; + version = 450; #endif std::unique_ptr builtInParseables(CreateBuiltInParseables(infoSink, source)); @@ -351,7 +360,6 @@ bool InitializeSymbolTables(TInfoSink& infoSink, TSymbolTable** commonTable, TS (profile == EEsProfile && version >= 310)) InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangGeometry, source, infoSink, commonTable, symbolTables); -#endif // check for compute if ((profile != EEsProfile && version >= 420) || @@ -359,19 +367,20 @@ bool InitializeSymbolTables(TInfoSink& infoSink, TSymbolTable** commonTable, TS InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangCompute, source, infoSink, commonTable, symbolTables); +#ifndef GLSLANG_ANGLE // check for ray tracing stages if (profile != EEsProfile && version >= 450) { - InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangRayGenNV, source, + InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangRayGen, source, infoSink, commonTable, symbolTables); - InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangIntersectNV, source, + InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangIntersect, source, infoSink, commonTable, symbolTables); - InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangAnyHitNV, source, + InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangAnyHit, source, infoSink, commonTable, symbolTables); - InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangClosestHitNV, source, + InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangClosestHit, source, infoSink, commonTable, symbolTables); - InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangMissNV, source, + InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangMiss, source, infoSink, commonTable, symbolTables); - InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangCallableNV, source, + InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangCallable, source, infoSink, commonTable, symbolTables); } @@ -386,6 +395,8 @@ bool InitializeSymbolTables(TInfoSink& infoSink, TSymbolTable** commonTable, TS (profile == EEsProfile && version >= 320)) InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangTaskNV, source, infoSink, commonTable, symbolTables); +#endif // !GLSLANG_ANGLE +#endif // !GLSLANG_WEB return true; } @@ -487,7 +498,7 @@ void SetupBuiltinSymbolTable(int version, EProfile profile, const SpvVersion& sp // Function to Print all builtins void 
DumpBuiltinSymbolTable(TInfoSink& infoSink, const TSymbolTable& symbolTable) { -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) infoSink.debug << "BuiltinSymbolTable {\n"; symbolTable.dump(infoSink, true); @@ -591,7 +602,7 @@ bool DeduceVersionProfile(TInfoSink& infoSink, EShLanguage stage, bool versionNo break; } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) // Correct for stage type... switch (stage) { case EShLangGeometry: @@ -623,12 +634,12 @@ bool DeduceVersionProfile(TInfoSink& infoSink, EShLanguage stage, bool versionNo version = profile == EEsProfile ? 310 : 420; } break; - case EShLangRayGenNV: - case EShLangIntersectNV: - case EShLangAnyHitNV: - case EShLangClosestHitNV: - case EShLangMissNV: - case EShLangCallableNV: + case EShLangRayGen: + case EShLangIntersect: + case EShLangAnyHit: + case EShLangClosestHit: + case EShLangMiss: + case EShLangCallable: if (profile == EEsProfile || version < 460) { correct = false; infoSink.info.message(EPrefixError, "#version: ray tracing shaders require non-es profile with version 460 or above"); @@ -716,6 +727,9 @@ void TranslateEnvironment(const TEnvironment* environment, EShMessages& messages case EShClientOpenGL: spvVersion.openGl = environment->input.dialectVersion; break; + case EShClientCount: + assert(0); + break; } switch (environment->input.languageFamily) { case EShSourceNone: @@ -728,6 +742,9 @@ void TranslateEnvironment(const TEnvironment* environment, EShMessages& messages source = EShSourceHlsl; messages = static_cast(messages | EShMsgReadHlsl); break; + case EShSourceCount: + assert(0); + break; } } @@ -861,7 +878,7 @@ bool ProcessDeferred( : userInput.scanVersion(version, profile, versionNotFirstToken); bool versionNotFound = version == 0; if (forceDefaultVersionAndProfile && source == EShSourceGlsl) { -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) if (! (messages & EShMsgSuppressWarnings) && ! versionNotFound && (version != defaultVersion || profile != defaultProfile)) { compiler->infoSink.info << "Warning, (version, profile) forced to be (" @@ -884,10 +901,13 @@ bool ProcessDeferred( #ifdef GLSLANG_WEB profile = EEsProfile; version = 310; +#elif defined(GLSLANG_ANGLE) + profile = ECoreProfile; + version = 450; #endif bool versionWillBeError = (versionNotFound || (profile == EEsProfile && version >= 300 && versionNotFirst)); -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) bool warnVersionNotFirst = false; if (! versionWillBeError && versionNotFirstToken) { if (messages & EShMsgRelaxedErrors) @@ -957,7 +977,7 @@ bool ProcessDeferred( parseContext->setLimits(*resources); if (! goodVersion) parseContext->addError(); -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) if (warnVersionNotFirst) { TSourceLoc loc; loc.init(); @@ -994,7 +1014,7 @@ bool ProcessDeferred( return success; } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) // Responsible for keeping track of the most recent source string and line in // the preprocessor and outputting newlines appropriately if the source string @@ -1217,14 +1237,16 @@ struct DoFullParse{ parseContext.infoSink.info << parseContext.getNumErrors() << " compilation errors. 
No code generated.\n\n"; } +#ifndef GLSLANG_ANGLE if (messages & EShMsgAST) intermediate.output(parseContext.infoSink, true); +#endif return success; } }; -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) // Take a single compilation unit, and run the preprocessor on it. // Return: True if there were no issues found in preprocessing, // False if during preprocessing any unknown version, pragmas or @@ -1678,19 +1700,29 @@ int ShGetUniformLocation(const ShHandle handle, const char* name) namespace glslang { -#include "../Include/revision.h" +Version GetVersion() +{ + Version version; + version.major = GLSLANG_VERSION_MAJOR; + version.minor = GLSLANG_VERSION_MINOR; + version.patch = GLSLANG_VERSION_PATCH; + version.flavor = GLSLANG_VERSION_FLAVOR; + return version; +} #define QUOTE(s) #s #define STR(n) QUOTE(n) const char* GetEsslVersionString() { - return "OpenGL ES GLSL 3.20 glslang Khronos. " STR(GLSLANG_MINOR_VERSION) "." STR(GLSLANG_PATCH_LEVEL); + return "OpenGL ES GLSL 3.20 glslang Khronos. " STR(GLSLANG_VERSION_MAJOR) "." STR(GLSLANG_VERSION_MINOR) "." STR( + GLSLANG_VERSION_PATCH) GLSLANG_VERSION_FLAVOR; } const char* GetGlslVersionString() { - return "4.60 glslang Khronos. " STR(GLSLANG_MINOR_VERSION) "." STR(GLSLANG_PATCH_LEVEL); + return "4.60 glslang Khronos. " STR(GLSLANG_VERSION_MAJOR) "." STR(GLSLANG_VERSION_MINOR) "." STR( + GLSLANG_VERSION_PATCH) GLSLANG_VERSION_FLAVOR; } int GetKhronosToolId() @@ -1771,6 +1803,8 @@ void TShader::setSourceEntryPoint(const char* name) sourceEntryPointName = name; } +// Log initial settings and transforms. +// See comment for class TProcesses. void TShader::addProcesses(const std::vector& p) { intermediate->addProcesses(p); @@ -1852,7 +1886,7 @@ bool TShader::parse(const TBuiltInResource* builtInResources, int defaultVersion &environment); } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) // Fill in a string with the result of preprocessing ShaderStrings // Returns true if all extensions, pragmas and version strings were valid. // @@ -1890,7 +1924,7 @@ const char* TShader::getInfoDebugLog() } TProgram::TProgram() : -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) reflection(0), #endif linked(false) @@ -1906,7 +1940,7 @@ TProgram::TProgram() : TProgram::~TProgram() { delete infoSink; -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) delete reflection; #endif @@ -1953,7 +1987,7 @@ bool TProgram::linkStage(EShLanguage stage, EShMessages messages) if (stages[stage].size() == 0) return true; -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) int numEsShaders = 0, numNonEsShaders = 0; for (auto it = stages[stage].begin(); it != stages[stage].end(); ++it) { if ((*it)->intermediate->getProfile() == EEsProfile) { @@ -2007,8 +2041,10 @@ bool TProgram::linkStage(EShLanguage stage, EShMessages messages) #endif intermediate[stage]->finalCheck(*infoSink, (messages & EShMsgKeepUncalled) != 0); +#ifndef GLSLANG_ANGLE if (messages & EShMsgAST) intermediate[stage]->output(*infoSink, true); +#endif return intermediate[stage]->getNumErrors() == 0; } @@ -2023,7 +2059,7 @@ const char* TProgram::getInfoDebugLog() return infoSink->debug.c_str(); } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) // // Reflection implementation. 
@@ -2105,6 +2141,6 @@ bool TProgram::mapIO(TIoMapResolver* pResolver, TIoMapper* pIoMapper) return ioMapper->doMap(pResolver, *infoSink); } -#endif // GLSLANG_WEB +#endif // !GLSLANG_WEB && !GLSLANG_ANGLE } // end namespace glslang diff --git a/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp b/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp index 44682379f72..007f22c1b4a 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp @@ -3,6 +3,7 @@ // Copyright (C) 2012-2013 LunarG, Inc. // Copyright (C) 2017 ARM Limited. // Copyright (C) 2015-2018 Google, Inc. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. // @@ -74,7 +75,8 @@ void TType::buildMangledName(TString& mangledName) const case EbtInt64: mangledName += "i64"; break; case EbtUint64: mangledName += "u64"; break; case EbtAtomicUint: mangledName += "au"; break; - case EbtAccStructNV: mangledName += "asnv"; break; + case EbtAccStruct: mangledName += "as"; break; + case EbtRayQuery: mangledName += "rq"; break; #endif case EbtSampler: switch (sampler.type) { @@ -120,7 +122,7 @@ void TType::buildMangledName(TString& mangledName) const mangledName += "-tx-struct"; char text[16]; // plenty enough space for the small integers. - snprintf(text, sizeof(text), "%d-", sampler.getStructReturnIndex()); + snprintf(text, sizeof(text), "%u-", sampler.getStructReturnIndex()); mangledName += text; } else { switch (sampler.getVectorSize()) { @@ -176,7 +178,7 @@ void TType::buildMangledName(TString& mangledName) const } } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) // // Dump functions. diff --git a/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h b/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h index 40ca3da532c..ec4bc3c599b 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h +++ b/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h @@ -117,7 +117,7 @@ public: virtual int getNumExtensions() const { return extensions == nullptr ? 
0 : (int)extensions->size(); } virtual const char** getExtensions() const { return extensions->data(); } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) virtual void dump(TInfoSink& infoSink, bool complete = false) const = 0; void dumpExtensions(TInfoSink& infoSink) const; #endif @@ -196,7 +196,7 @@ public: } virtual const char** getMemberExtensions(int member) const { return (*memberExtensions)[member].data(); } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) virtual void dump(TInfoSink& infoSink, bool complete = false) const; #endif @@ -319,7 +319,7 @@ public: virtual TParameter& operator[](int i) { assert(writable); return parameters[i]; } virtual const TParameter& operator[](int i) const { return parameters[i]; } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) virtual void dump(TInfoSink& infoSink, bool complete = false) const override; #endif @@ -381,7 +381,7 @@ public: virtual const char** getExtensions() const override { return anonContainer.getMemberExtensions(memberNumber); } virtual int getAnonId() const { return anonId; } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) virtual void dump(TInfoSink& infoSink, bool complete = false) const override; #endif @@ -551,7 +551,7 @@ public: void relateToOperator(const char* name, TOperator op); void setFunctionExtensions(const char* name, int num, const char* const extensions[]); -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) void dump(TInfoSink& infoSink, bool complete = false) const; #endif TSymbolTableLevel* clone() const; @@ -854,7 +854,7 @@ public: } int getMaxSymbolId() { return uniqueId; } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) void dump(TInfoSink& infoSink, bool complete = false) const; #endif void copyTable(const TSymbolTable& copyOf); diff --git a/thirdparty/glslang/glslang/MachineIndependent/Versions.cpp b/thirdparty/glslang/glslang/MachineIndependent/Versions.cpp index e549074df8a..896fd5ae20b 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/Versions.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/Versions.cpp @@ -2,7 +2,8 @@ // Copyright (C) 2002-2005 3Dlabs Inc. Ltd. // Copyright (C) 2012-2013 LunarG, Inc. // Copyright (C) 2017 ARM Limited. -// Copyright (C) 2015-2018 Google, Inc. +// Copyright (C) 2015-2020 Google, Inc. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. // @@ -63,6 +64,7 @@ // checkDeprecated() // requireNotRemoved() // requireExtensions() +// extensionRequires() // // Typically, only the first two calls are needed. They go into a code path that // implements Feature F, and will log the proper error/warning messages. Parsing @@ -77,9 +79,11 @@ // const char* const XXX_extension_X = "XXX_extension_X"; // // 2) Add extension initialization to TParseVersions::initializeExtensionBehavior(), -// the first function below: +// the first function below and optionally a entry to extensionData for additional +// error checks: // // extensionBehavior[XXX_extension_X] = EBhDisable; +// (Optional) exts[] = {XXX_extension_X, EShTargetSpv_1_4} // // 3) Add any preprocessor directives etc. in the next function, TParseVersions::getPreamble(): // @@ -139,6 +143,8 @@ // set of extensions that both enable them and are necessary, given the version of the symbol // table. (There is a different symbol table for each version.) 
// +// 7) If the extension has additional requirements like minimum SPIR-V version required, add them +// to extensionRequires() #include "parseVersions.h" #include "localintermediate.h" @@ -154,6 +160,20 @@ namespace glslang { // void TParseVersions::initializeExtensionBehavior() { + typedef struct { + const char *const extensionName; + EShTargetLanguageVersion minSpvVersion; + } extensionData; + + const extensionData exts[] = { {E_GL_EXT_ray_tracing, EShTargetSpv_1_4} }; + + for (size_t ii = 0; ii < sizeof(exts) / sizeof(exts[0]); ii++) { + // Add only extensions which require > spv1.0 to save space in map + if (exts[ii].minSpvVersion > EShTargetSpv_1_0) { + extensionMinSpv[E_GL_EXT_ray_tracing] = exts[ii].minSpvVersion; + } + } + extensionBehavior[E_GL_OES_texture_3D] = EBhDisable; extensionBehavior[E_GL_OES_standard_derivatives] = EBhDisable; extensionBehavior[E_GL_EXT_frag_depth] = EBhDisable; @@ -196,6 +216,12 @@ void TParseVersions::initializeExtensionBehavior() extensionBehavior[E_GL_ARB_shader_clock] = EBhDisable; extensionBehavior[E_GL_ARB_uniform_buffer_object] = EBhDisable; extensionBehavior[E_GL_ARB_sample_shading] = EBhDisable; + extensionBehavior[E_GL_ARB_shader_bit_encoding] = EBhDisable; + extensionBehavior[E_GL_ARB_shader_image_size] = EBhDisable; + extensionBehavior[E_GL_ARB_shader_storage_buffer_object] = EBhDisable; + extensionBehavior[E_GL_ARB_shading_language_packing] = EBhDisable; + extensionBehavior[E_GL_ARB_texture_query_lod] = EBhDisable; + extensionBehavior[E_GL_ARB_vertex_attrib_64bit] = EBhDisable; extensionBehavior[E_GL_KHR_shader_subgroup_basic] = EBhDisable; extensionBehavior[E_GL_KHR_shader_subgroup_vote] = EBhDisable; @@ -221,6 +247,7 @@ void TParseVersions::initializeExtensionBehavior() extensionBehavior[E_GL_EXT_buffer_reference2] = EBhDisable; extensionBehavior[E_GL_EXT_buffer_reference_uvec2] = EBhDisable; extensionBehavior[E_GL_EXT_demote_to_helper_invocation] = EBhDisable; + extensionBehavior[E_GL_EXT_debug_printf] = EBhDisable; extensionBehavior[E_GL_EXT_shader_16bit_storage] = EBhDisable; extensionBehavior[E_GL_EXT_shader_8bit_storage] = EBhDisable; @@ -289,11 +316,17 @@ void TParseVersions::initializeExtensionBehavior() extensionBehavior[E_GL_OES_tessellation_point_size] = EBhDisable; extensionBehavior[E_GL_OES_texture_buffer] = EBhDisable; extensionBehavior[E_GL_OES_texture_cube_map_array] = EBhDisable; + extensionBehavior[E_GL_EXT_shader_integer_mix] = EBhDisable; // EXT extensions - extensionBehavior[E_GL_EXT_device_group] = EBhDisable; - extensionBehavior[E_GL_EXT_multiview] = EBhDisable; - extensionBehavior[E_GL_EXT_shader_realtime_clock] = EBhDisable; + extensionBehavior[E_GL_EXT_device_group] = EBhDisable; + extensionBehavior[E_GL_EXT_multiview] = EBhDisable; + extensionBehavior[E_GL_EXT_shader_realtime_clock] = EBhDisable; + extensionBehavior[E_GL_EXT_ray_tracing] = EBhDisable; + extensionBehavior[E_GL_EXT_ray_query] = EBhDisable; + extensionBehavior[E_GL_EXT_ray_flags_primitive_culling] = EBhDisable; + extensionBehavior[E_GL_EXT_blend_func_extended] = EBhDisable; + extensionBehavior[E_GL_EXT_shader_implicit_conversions] = EBhDisable; // OVR extensions extensionBehavior[E_GL_OVR_multiview] = EBhDisable; @@ -314,7 +347,9 @@ void TParseVersions::initializeExtensionBehavior() extensionBehavior[E_GL_EXT_shader_subgroup_extended_types_int16] = EBhDisable; extensionBehavior[E_GL_EXT_shader_subgroup_extended_types_int64] = EBhDisable; extensionBehavior[E_GL_EXT_shader_subgroup_extended_types_float16] = EBhDisable; + 
extensionBehavior[E_GL_EXT_shader_atomic_float] = EBhDisable; } + #endif // GLSLANG_WEB // Get code that is not part of a shared symbol table, is specific to this shader, @@ -352,6 +387,9 @@ void TParseVersions::getPreamble(std::string& preamble) "#define GL_EXT_tessellation_point_size 1\n" "#define GL_EXT_texture_buffer 1\n" "#define GL_EXT_texture_cube_map_array 1\n" + "#define GL_EXT_shader_implicit_conversions 1\n" + "#define GL_EXT_shader_integer_mix 1\n" + "#define GL_EXT_blend_func_extended 1\n" // OES matching AEP "#define GL_OES_geometry_shader 1\n" @@ -400,10 +438,16 @@ void TParseVersions::getPreamble(std::string& preamble) "#define GL_ARB_sparse_texture_clamp 1\n" "#define GL_ARB_shader_stencil_export 1\n" "#define GL_ARB_sample_shading 1\n" + "#define GL_ARB_shader_image_size 1\n" + "#define GL_ARB_shading_language_packing 1\n" // "#define GL_ARB_cull_distance 1\n" // present for 4.5, but need extension control over block members "#define GL_ARB_post_depth_coverage 1\n" "#define GL_ARB_fragment_shader_interlock 1\n" "#define GL_ARB_uniform_buffer_object 1\n" + "#define GL_ARB_shader_bit_encoding 1\n" + "#define GL_ARB_shader_storage_buffer_object 1\n" + "#define GL_ARB_texture_query_lod 1\n" + "#define GL_ARB_vertex_attrib_64bit 1\n" "#define GL_EXT_shader_non_constant_global_initializers 1\n" "#define GL_EXT_shader_image_load_formatted 1\n" "#define GL_EXT_post_depth_coverage 1\n" @@ -418,6 +462,7 @@ void TParseVersions::getPreamble(std::string& preamble) "#define GL_EXT_buffer_reference2 1\n" "#define GL_EXT_buffer_reference_uvec2 1\n" "#define GL_EXT_demote_to_helper_invocation 1\n" + "#define GL_EXT_debug_printf 1\n" // GL_KHR_shader_subgroup "#define GL_KHR_shader_subgroup_basic 1\n" @@ -429,8 +474,11 @@ void TParseVersions::getPreamble(std::string& preamble) "#define GL_KHR_shader_subgroup_clustered 1\n" "#define GL_KHR_shader_subgroup_quad 1\n" - "#define E_GL_EXT_shader_atomic_int64 1\n" - "#define E_GL_EXT_shader_realtime_clock 1\n" + "#define GL_EXT_shader_atomic_int64 1\n" + "#define GL_EXT_shader_realtime_clock 1\n" + "#define GL_EXT_ray_tracing 1\n" + "#define GL_EXT_ray_query 1\n" + "#define GL_EXT_ray_flags_primitive_culling 1\n" "#define GL_AMD_shader_ballot 1\n" "#define GL_AMD_shader_trinary_minmax 1\n" @@ -473,6 +521,8 @@ void TParseVersions::getPreamble(std::string& preamble) "#define GL_EXT_shader_subgroup_extended_types_int16 1\n" "#define GL_EXT_shader_subgroup_extended_types_int64 1\n" "#define GL_EXT_shader_subgroup_extended_types_float16 1\n" + + "#define GL_EXT_shader_atomic_float 1\n" ; if (version >= 150) { @@ -544,12 +594,12 @@ const char* StageName(EShLanguage stage) case EShLangTessControl: return "tessellation control"; case EShLangTessEvaluation: return "tessellation evaluation"; case EShLangGeometry: return "geometry"; - case EShLangRayGenNV: return "ray-generation"; - case EShLangIntersectNV: return "intersection"; - case EShLangAnyHitNV: return "any-hit"; - case EShLangClosestHitNV: return "closest-hit"; - case EShLangMissNV: return "miss"; - case EShLangCallableNV: return "callable"; + case EShLangRayGen: return "ray-generation"; + case EShLangIntersect: return "intersection"; + case EShLangAnyHit: return "any-hit"; + case EShLangClosestHit: return "closest-hit"; + case EShLangMiss: return "miss"; + case EShLangCallable: return "callable"; case EShLangMeshNV: return "mesh"; case EShLangTaskNV: return "task"; #endif @@ -712,7 +762,8 @@ bool TParseVersions::checkExtensionsRequested(const TSourceLoc& loc, int numExte // Use when there are no 
profile/version to check, it's just an error if one of the // extensions is not present. // -void TParseVersions::requireExtensions(const TSourceLoc& loc, int numExtensions, const char* const extensions[], const char* featureDesc) +void TParseVersions::requireExtensions(const TSourceLoc& loc, int numExtensions, const char* const extensions[], + const char* featureDesc) { if (checkExtensionsRequested(loc, numExtensions, extensions, featureDesc)) return; @@ -731,7 +782,8 @@ void TParseVersions::requireExtensions(const TSourceLoc& loc, int numExtensions, // Use by preprocessor when there are no profile/version to check, it's just an error if one of the // extensions is not present. // -void TParseVersions::ppRequireExtensions(const TSourceLoc& loc, int numExtensions, const char* const extensions[], const char* featureDesc) +void TParseVersions::ppRequireExtensions(const TSourceLoc& loc, int numExtensions, const char* const extensions[], + const char* featureDesc) { if (checkExtensionsRequested(loc, numExtensions, extensions, featureDesc)) return; @@ -797,10 +849,14 @@ void TParseVersions::updateExtensionBehavior(int line, const char* extension, co error(getCurrentLoc(), "behavior not supported:", "#extension", behaviorString); return; } + bool on = behavior != EBhDisable; // check if extension is used with correct shader stage checkExtensionStage(getCurrentLoc(), extension); + // check if extension has additional requirements + extensionRequires(getCurrentLoc(), extension ,behaviorString); + // update the requested extension updateExtensionBehavior(extension, behavior); @@ -863,6 +919,32 @@ void TParseVersions::updateExtensionBehavior(int line, const char* extension, co updateExtensionBehavior(line, "GL_EXT_shader_explicit_arithmetic_types_int64", behaviorString); else if (strcmp(extension, "GL_EXT_shader_subgroup_extended_types_float16") == 0) updateExtensionBehavior(line, "GL_EXT_shader_explicit_arithmetic_types_float16", behaviorString); + + // see if we need to update the numeric features + else if (strcmp(extension, "GL_EXT_shader_explicit_arithmetic_types") == 0) + intermediate.updateNumericFeature(TNumericFeatures::shader_explicit_arithmetic_types, on); + else if (strcmp(extension, "GL_EXT_shader_explicit_arithmetic_types_int8") == 0) + intermediate.updateNumericFeature(TNumericFeatures::shader_explicit_arithmetic_types_int8, on); + else if (strcmp(extension, "GL_EXT_shader_explicit_arithmetic_types_int16") == 0) + intermediate.updateNumericFeature(TNumericFeatures::shader_explicit_arithmetic_types_int16, on); + else if (strcmp(extension, "GL_EXT_shader_explicit_arithmetic_types_int32") == 0) + intermediate.updateNumericFeature(TNumericFeatures::shader_explicit_arithmetic_types_int32, on); + else if (strcmp(extension, "GL_EXT_shader_explicit_arithmetic_types_int64") == 0) + intermediate.updateNumericFeature(TNumericFeatures::shader_explicit_arithmetic_types_int64, on); + else if (strcmp(extension, "GL_EXT_shader_explicit_arithmetic_types_float16") == 0) + intermediate.updateNumericFeature(TNumericFeatures::shader_explicit_arithmetic_types_float16, on); + else if (strcmp(extension, "GL_EXT_shader_explicit_arithmetic_types_float32") == 0) + intermediate.updateNumericFeature(TNumericFeatures::shader_explicit_arithmetic_types_float32, on); + else if (strcmp(extension, "GL_EXT_shader_explicit_arithmetic_types_float64") == 0) + intermediate.updateNumericFeature(TNumericFeatures::shader_explicit_arithmetic_types_float64, on); + else if (strcmp(extension, "GL_EXT_shader_implicit_conversions") 
== 0) + intermediate.updateNumericFeature(TNumericFeatures::shader_implicit_conversions, on); + else if (strcmp(extension, "GL_ARB_gpu_shader_fp64") == 0) + intermediate.updateNumericFeature(TNumericFeatures::gpu_shader_fp64, on); + else if (strcmp(extension, "GL_AMD_gpu_shader_int16") == 0) + intermediate.updateNumericFeature(TNumericFeatures::gpu_shader_int16, on); + else if (strcmp(extension, "GL_AMD_gpu_shader_half_float") == 0) + intermediate.updateNumericFeature(TNumericFeatures::gpu_shader_half_float, on); } void TParseVersions::updateExtensionBehavior(const char* extension, TExtensionBehavior behavior) @@ -898,7 +980,7 @@ void TParseVersions::updateExtensionBehavior(const char* extension, TExtensionBe } else { if (iter->second == EBhDisablePartial) warn(getCurrentLoc(), "extension is only partially supported:", "#extension", extension); - if (behavior == EBhEnable || behavior == EBhRequire) + if (behavior != EBhDisable) intermediate.addRequestedExtension(extension); iter->second = behavior; } @@ -917,6 +999,24 @@ void TParseVersions::checkExtensionStage(const TSourceLoc& loc, const char * con } } +// Check if extension has additional requirements +void TParseVersions::extensionRequires(const TSourceLoc &loc, const char * const extension, const char *behaviorString) +{ + bool isEnabled = false; + if (!strcmp("require", behaviorString)) + isEnabled = true; + else if (!strcmp("enable", behaviorString)) + isEnabled = true; + + if (isEnabled) { + unsigned int minSpvVersion = 0; + auto iter = extensionMinSpv.find(TString(extension)); + if (iter != extensionMinSpv.end()) + minSpvVersion = iter->second; + requireSpv(loc, extension, minSpvVersion); + } +} + // Call for any operation needing full GLSL integer data-type support. void TParseVersions::fullIntegerCheck(const TSourceLoc& loc, const char* op) { @@ -927,8 +1027,13 @@ void TParseVersions::fullIntegerCheck(const TSourceLoc& loc, const char* op) // Call for any operation needing GLSL double data-type support. void TParseVersions::doubleCheck(const TSourceLoc& loc, const char* op) { + //requireProfile(loc, ECoreProfile | ECompatibilityProfile, op); - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 400, E_GL_ARB_gpu_shader_fp64, op); + if (language == EShLangVertex) { + const char* const f64_Extensions[] = {E_GL_ARB_gpu_shader_fp64, E_GL_ARB_vertex_attrib_64bit}; + profileRequires(loc, ECoreProfile | ECompatibilityProfile, 400, 2, f64_Extensions, op); + } else + profileRequires(loc, ECoreProfile | ECompatibilityProfile, 400, E_GL_ARB_gpu_shader_fp64, op); } // Call for any operation needing GLSL float16 data-type support. @@ -1169,5 +1274,12 @@ void TParseVersions::requireSpv(const TSourceLoc& loc, const char* op) error(loc, "only allowed when generating SPIR-V", op, ""); #endif } +void TParseVersions::requireSpv(const TSourceLoc& loc, const char *op, unsigned int version) +{ +#ifndef GLSLANG_WEB + if (spvVersion.spv < version) + error(loc, "not supported for current targeted SPIR-V version", op, ""); +#endif +} } // end namespace glslang diff --git a/thirdparty/glslang/glslang/MachineIndependent/Versions.h b/thirdparty/glslang/glslang/MachineIndependent/Versions.h index 58558e595a1..f52f605ea8d 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/Versions.h +++ b/thirdparty/glslang/glslang/MachineIndependent/Versions.h @@ -3,6 +3,7 @@ // Copyright (C) 2012-2013 LunarG, Inc. // Copyright (C) 2017 ARM Limited. // Copyright (C) 2015-2018 Google, Inc. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. 
All rights reserved. // // All rights reserved. // @@ -35,9 +36,12 @@ // ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. // + #ifndef _VERSIONS_INCLUDED_ #define _VERSIONS_INCLUDED_ +#define LAST_ELEMENT_MARKER(x) x + // // Help manage multiple profiles, versions, extensions etc. // @@ -49,12 +53,13 @@ // Don't maintain an ordinal set of enums (0,1,2,3...) to avoid all possible // defects from mixing the two different forms. // -typedef enum { +typedef enum : unsigned { EBadProfile = 0, ENoProfile = (1 << 0), // only for desktop, before profiles showed up ECoreProfile = (1 << 1), ECompatibilityProfile = (1 << 2), - EEsProfile = (1 << 3) + EEsProfile = (1 << 3), + LAST_ELEMENT_MARKER(EProfileCount), } EProfile; namespace glslang { @@ -148,6 +153,12 @@ const char* const E_GL_ARB_fragment_shader_interlock = "GL_ARB_fragment_shade const char* const E_GL_ARB_shader_clock = "GL_ARB_shader_clock"; const char* const E_GL_ARB_uniform_buffer_object = "GL_ARB_uniform_buffer_object"; const char* const E_GL_ARB_sample_shading = "GL_ARB_sample_shading"; +const char* const E_GL_ARB_shader_bit_encoding = "GL_ARB_shader_bit_encoding"; +const char* const E_GL_ARB_shader_image_size = "GL_ARB_shader_image_size"; +const char* const E_GL_ARB_shader_storage_buffer_object = "GL_ARB_shader_storage_buffer_object"; +const char* const E_GL_ARB_shading_language_packing = "GL_ARB_shading_language_packing"; +const char* const E_GL_ARB_texture_query_lod = "GL_ARB_texture_query_lod"; +const char* const E_GL_ARB_vertex_attrib_64bit = "GL_ARB_vertex_attrib_64bit"; const char* const E_GL_KHR_shader_subgroup_basic = "GL_KHR_shader_subgroup_basic"; const char* const E_GL_KHR_shader_subgroup_vote = "GL_KHR_shader_subgroup_vote"; @@ -182,6 +193,12 @@ const char* const E_GL_EXT_buffer_reference2 = "GL_EXT_buffer_ref const char* const E_GL_EXT_buffer_reference_uvec2 = "GL_EXT_buffer_reference_uvec2"; const char* const E_GL_EXT_demote_to_helper_invocation = "GL_EXT_demote_to_helper_invocation"; const char* const E_GL_EXT_shader_realtime_clock = "GL_EXT_shader_realtime_clock"; +const char* const E_GL_EXT_debug_printf = "GL_EXT_debug_printf"; +const char* const E_GL_EXT_ray_tracing = "GL_EXT_ray_tracing"; +const char* const E_GL_EXT_ray_query = "GL_EXT_ray_query"; +const char* const E_GL_EXT_ray_flags_primitive_culling = "GL_EXT_ray_flags_primitive_culling"; +const char* const E_GL_EXT_blend_func_extended = "GL_EXT_blend_func_extended"; +const char* const E_GL_EXT_shader_implicit_conversions = "GL_EXT_shader_implicit_conversions"; // Arrays of extensions for the above viewportEXTs duplications @@ -253,6 +270,7 @@ const char* const E_GL_EXT_tessellation_shader = "GL_EXT_tessel const char* const E_GL_EXT_tessellation_point_size = "GL_EXT_tessellation_point_size"; const char* const E_GL_EXT_texture_buffer = "GL_EXT_texture_buffer"; const char* const E_GL_EXT_texture_cube_map_array = "GL_EXT_texture_cube_map_array"; +const char* const E_GL_EXT_shader_integer_mix = "GL_EXT_shader_integer_mix"; // OES matching AEP const char* const E_GL_OES_geometry_shader = "GL_OES_geometry_shader"; @@ -280,6 +298,8 @@ const char* const E_GL_EXT_shader_subgroup_extended_types_int16 = "GL_EXT_shad const char* const E_GL_EXT_shader_subgroup_extended_types_int64 = "GL_EXT_shader_subgroup_extended_types_int64"; const char* const E_GL_EXT_shader_subgroup_extended_types_float16 = "GL_EXT_shader_subgroup_extended_types_float16"; +const char* const E_GL_EXT_shader_atomic_float = "GL_EXT_shader_atomic_float"; + // 
Arrays of extensions for the above AEP duplications const char* const AEP_geometry_shader[] = { E_GL_EXT_geometry_shader, E_GL_OES_geometry_shader }; diff --git a/thirdparty/glslang/glslang/MachineIndependent/gl_types.h b/thirdparty/glslang/glslang/MachineIndependent/gl_types.h index b6f613bcedc..b9372d4bbbf 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/gl_types.h +++ b/thirdparty/glslang/glslang/MachineIndependent/gl_types.h @@ -52,6 +52,14 @@ #define GL_UNSIGNED_INT64_VEC2_ARB 0x8FE5 #define GL_UNSIGNED_INT64_VEC3_ARB 0x8FE6 #define GL_UNSIGNED_INT64_VEC4_ARB 0x8FE7 +#define GL_UNSIGNED_INT16_VEC2_NV 0x8FF1 +#define GL_UNSIGNED_INT16_VEC3_NV 0x8FF2 +#define GL_UNSIGNED_INT16_VEC4_NV 0x8FF3 + +#define GL_INT16_NV 0x8FE4 +#define GL_INT16_VEC2_NV 0x8FE5 +#define GL_INT16_VEC3_NV 0x8FE6 +#define GL_INT16_VEC4_NV 0x8FE7 #define GL_BOOL 0x8B56 #define GL_BOOL_VEC2 0x8B57 diff --git a/thirdparty/glslang/glslang/MachineIndependent/glslang.y b/thirdparty/glslang/glslang/MachineIndependent/glslang.y index 9f30fdb2ab4..23adcb057e6 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/glslang.y +++ b/thirdparty/glslang/glslang/MachineIndependent/glslang.y @@ -2,7 +2,8 @@ // Copyright (C) 2002-2005 3Dlabs Inc. Ltd. // Copyright (C) 2012-2013 LunarG, Inc. // Copyright (C) 2017 ARM Limited. -// Copyright (C) 2015-2018 Google, Inc. +// Copyright (C) 2015-2019 Google, Inc. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. // @@ -204,6 +205,8 @@ extern int yylex(YYSTYPE*, TParseContext&); %token F64MAT4X2 F64MAT4X3 F64MAT4X4 %token ATOMIC_UINT %token ACCSTRUCTNV +%token ACCSTRUCTEXT +%token RAYQUERYEXT %token FCOOPMATNV ICOOPMATNV UCOOPMATNV // combined image/sampler @@ -263,6 +266,7 @@ extern int yylex(YYSTYPE*, TParseContext&); %token AND_OP OR_OP XOR_OP MUL_ASSIGN DIV_ASSIGN ADD_ASSIGN %token MOD_ASSIGN LEFT_ASSIGN RIGHT_ASSIGN AND_ASSIGN XOR_ASSIGN OR_ASSIGN %token SUB_ASSIGN +%token STRING_LITERAL %token LEFT_PAREN RIGHT_PAREN LEFT_BRACKET RIGHT_BRACKET LEFT_BRACE RIGHT_BRACE DOT %token COMMA COLON EQUAL SEMICOLON BANG DASH TILDE PLUS STAR SLASH PERCENT @@ -285,9 +289,10 @@ extern int yylex(YYSTYPE*, TParseContext&); %token INT64CONSTANT UINT64CONSTANT %token SUBROUTINE DEMOTE %token PAYLOADNV PAYLOADINNV HITATTRNV CALLDATANV CALLDATAINNV +%token PAYLOADEXT PAYLOADINEXT HITATTREXT CALLDATAEXT CALLDATAINEXT %token PATCH SAMPLE NONUNIFORM %token COHERENT VOLATILE RESTRICT READONLY WRITEONLY DEVICECOHERENT QUEUEFAMILYCOHERENT WORKGROUPCOHERENT -%token SUBGROUPCOHERENT NONPRIVATE +%token SUBGROUPCOHERENT NONPRIVATE SHADERCALLCOHERENT %token NOPERSPECTIVE EXPLICITINTERPAMD PERVERTEXNV PERPRIMITIVENV PERVIEWNV PERTASKNV %token PRECISE @@ -377,6 +382,9 @@ primary_expression $$ = parseContext.intermediate.addConstantUnion($1.b, $1.loc, true); } + | STRING_LITERAL { + $$ = parseContext.intermediate.addConstantUnion($1.string, $1.loc, true); + } | INT32CONSTANT { parseContext.explicitInt32Check($1.loc, "32-bit signed literal"); $$ = parseContext.intermediate.addConstantUnion($1.i, $1.loc, true); @@ -770,7 +778,7 @@ assignment_expression parseContext.specializationCheck($2.loc, $1->getType(), "="); parseContext.lValueErrorCheck($2.loc, "assign", $1); parseContext.rValueErrorCheck($2.loc, "assign", $3); - $$ = parseContext.intermediate.addAssign($2.op, $1, $3, $2.loc); + $$ = parseContext.addAssign($2.loc, $2.op, $1, $3); if ($$ == 0) { parseContext.assignError($2.loc, "assign", $1->getCompleteString(), $3->getCompleteString()); 
$$ = $1; @@ -1415,42 +1423,81 @@ storage_qualifier } | HITATTRNV { parseContext.globalCheck($1.loc, "hitAttributeNV"); - parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangIntersectNVMask | EShLangClosestHitNVMask - | EShLangAnyHitNVMask), "hitAttributeNV"); + parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangIntersectMask | EShLangClosestHitMask + | EShLangAnyHitMask), "hitAttributeNV"); parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "hitAttributeNV"); $$.init($1.loc); - $$.qualifier.storage = EvqHitAttrNV; + $$.qualifier.storage = EvqHitAttr; + } + | HITATTREXT { + parseContext.globalCheck($1.loc, "hitAttributeEXT"); + parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangIntersectMask | EShLangClosestHitMask + | EShLangAnyHitMask), "hitAttributeEXT"); + parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_EXT_ray_tracing, "hitAttributeNV"); + $$.init($1.loc); + $$.qualifier.storage = EvqHitAttr; } | PAYLOADNV { parseContext.globalCheck($1.loc, "rayPayloadNV"); - parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangRayGenNVMask | EShLangClosestHitNVMask | - EShLangAnyHitNVMask | EShLangMissNVMask), "rayPayloadNV"); + parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangRayGenMask | EShLangClosestHitMask | + EShLangAnyHitMask | EShLangMissMask), "rayPayloadNV"); parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "rayPayloadNV"); $$.init($1.loc); - $$.qualifier.storage = EvqPayloadNV; + $$.qualifier.storage = EvqPayload; + } + | PAYLOADEXT { + parseContext.globalCheck($1.loc, "rayPayloadEXT"); + parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangRayGenMask | EShLangClosestHitMask | + EShLangAnyHitMask | EShLangMissMask), "rayPayloadEXT"); + parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_EXT_ray_tracing, "rayPayloadEXT"); + $$.init($1.loc); + $$.qualifier.storage = EvqPayload; } | PAYLOADINNV { parseContext.globalCheck($1.loc, "rayPayloadInNV"); - parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangClosestHitNVMask | - EShLangAnyHitNVMask | EShLangMissNVMask), "rayPayloadInNV"); + parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangClosestHitMask | + EShLangAnyHitMask | EShLangMissMask), "rayPayloadInNV"); parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "rayPayloadInNV"); $$.init($1.loc); - $$.qualifier.storage = EvqPayloadInNV; + $$.qualifier.storage = EvqPayloadIn; + } + | PAYLOADINEXT { + parseContext.globalCheck($1.loc, "rayPayloadInEXT"); + parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangClosestHitMask | + EShLangAnyHitMask | EShLangMissMask), "rayPayloadInEXT"); + parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_EXT_ray_tracing, "rayPayloadInEXT"); + $$.init($1.loc); + $$.qualifier.storage = EvqPayloadIn; } | CALLDATANV { parseContext.globalCheck($1.loc, "callableDataNV"); - parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangRayGenNVMask | - EShLangClosestHitNVMask | EShLangMissNVMask | EShLangCallableNVMask), "callableDataNV"); + parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangRayGenMask | + EShLangClosestHitMask | EShLangMissMask | EShLangCallableMask), "callableDataNV"); parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "callableDataNV"); $$.init($1.loc); - $$.qualifier.storage = EvqCallableDataNV; + $$.qualifier.storage = EvqCallableData; + } + | CALLDATAEXT { + parseContext.globalCheck($1.loc, "callableDataEXT"); + parseContext.requireStage($1.loc, 
(EShLanguageMask)(EShLangRayGenMask | + EShLangClosestHitMask | EShLangMissMask | EShLangCallableMask), "callableDataEXT"); + parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_EXT_ray_tracing, "callableDataEXT"); + $$.init($1.loc); + $$.qualifier.storage = EvqCallableData; } | CALLDATAINNV { parseContext.globalCheck($1.loc, "callableDataInNV"); - parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangCallableNVMask), "callableDataInNV"); + parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangCallableMask), "callableDataInNV"); parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "callableDataInNV"); $$.init($1.loc); - $$.qualifier.storage = EvqCallableDataInNV; + $$.qualifier.storage = EvqCallableDataIn; + } + | CALLDATAINEXT { + parseContext.globalCheck($1.loc, "callableDataInEXT"); + parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangCallableMask), "callableDataInEXT"); + parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_EXT_ray_tracing, "callableDataInEXT"); + $$.init($1.loc); + $$.qualifier.storage = EvqCallableDataIn; } | COHERENT { $$.init($1.loc); @@ -1481,6 +1528,11 @@ storage_qualifier parseContext.requireExtensions($1.loc, 1, &E_GL_KHR_memory_scope_semantics, "nonprivate"); $$.qualifier.nonprivate = true; } + | SHADERCALLCOHERENT { + $$.init($1.loc); + parseContext.requireExtensions($1.loc, 1, &E_GL_EXT_ray_tracing, "shadercallcoherent"); + $$.qualifier.shadercallcoherent = true; + } | VOLATILE { $$.init($1.loc); $$.qualifier.volatil = true; @@ -2350,7 +2402,15 @@ type_specifier_nonarray } | ACCSTRUCTNV { $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtAccStructNV; + $$.basicType = EbtAccStruct; + } + | ACCSTRUCTEXT { + $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); + $$.basicType = EbtAccStruct; + } + | RAYQUERYEXT { + $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); + $$.basicType = EbtRayQuery; } | ATOMIC_UINT { parseContext.vulkanRemoved($1.loc, "atomic counter types"); @@ -3782,6 +3842,14 @@ function_definition : function_prototype { $1.function = parseContext.handleFunctionDeclarator($1.loc, *$1.function, false /* not prototype */); $1.intermNode = parseContext.handleFunctionDefinition($1.loc, *$1.function); + + // For ES 100 only, according to ES shading language 100 spec: A function + // body has a scope nested inside the function's definition. + if (parseContext.profile == EEsProfile && parseContext.version == 100) + { + parseContext.symbolTable.push(); + ++parseContext.statementNestingLevel; + } } compound_statement_no_new_scope { // May be best done as post process phase on intermediate code @@ -3797,6 +3865,17 @@ function_definition $$->getAsAggregate()->setOptimize(parseContext.contextPragma.optimize); $$->getAsAggregate()->setDebug(parseContext.contextPragma.debug); $$->getAsAggregate()->setPragmaTable(parseContext.contextPragma.pragmaTable); + + // Set currentFunctionType to empty pointer when goes outside of the function + parseContext.currentFunctionType = nullptr; + + // For ES 100 only, according to ES shading language 100 spec: A function + // body has a scope nested inside the function's definition. 
+ if (parseContext.profile == EEsProfile && parseContext.version == 100) + { + parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); + --parseContext.statementNestingLevel; + } } ; diff --git a/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp b/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp index 2a47faada46..ac357977972 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp @@ -62,7 +62,7 @@ /* Copy the first part of user declarations. */ -#line 68 "MachineIndependent/glslang.y" /* yacc.c:339 */ +#line 69 "MachineIndependent/glslang.y" /* yacc.c:339 */ /* Based on: @@ -281,256 +281,265 @@ extern int yydebug; F64MAT4X4 = 413, ATOMIC_UINT = 414, ACCSTRUCTNV = 415, - FCOOPMATNV = 416, - ICOOPMATNV = 417, - UCOOPMATNV = 418, - SAMPLERCUBEARRAY = 419, - SAMPLERCUBEARRAYSHADOW = 420, - ISAMPLERCUBEARRAY = 421, - USAMPLERCUBEARRAY = 422, - SAMPLER1D = 423, - SAMPLER1DARRAY = 424, - SAMPLER1DARRAYSHADOW = 425, - ISAMPLER1D = 426, - SAMPLER1DSHADOW = 427, - SAMPLER2DRECT = 428, - SAMPLER2DRECTSHADOW = 429, - ISAMPLER2DRECT = 430, - USAMPLER2DRECT = 431, - SAMPLERBUFFER = 432, - ISAMPLERBUFFER = 433, - USAMPLERBUFFER = 434, - SAMPLER2DMS = 435, - ISAMPLER2DMS = 436, - USAMPLER2DMS = 437, - SAMPLER2DMSARRAY = 438, - ISAMPLER2DMSARRAY = 439, - USAMPLER2DMSARRAY = 440, - SAMPLEREXTERNALOES = 441, - SAMPLEREXTERNAL2DY2YEXT = 442, - ISAMPLER1DARRAY = 443, - USAMPLER1D = 444, - USAMPLER1DARRAY = 445, - F16SAMPLER1D = 446, - F16SAMPLER2D = 447, - F16SAMPLER3D = 448, - F16SAMPLER2DRECT = 449, - F16SAMPLERCUBE = 450, - F16SAMPLER1DARRAY = 451, - F16SAMPLER2DARRAY = 452, - F16SAMPLERCUBEARRAY = 453, - F16SAMPLERBUFFER = 454, - F16SAMPLER2DMS = 455, - F16SAMPLER2DMSARRAY = 456, - F16SAMPLER1DSHADOW = 457, - F16SAMPLER2DSHADOW = 458, - F16SAMPLER1DARRAYSHADOW = 459, - F16SAMPLER2DARRAYSHADOW = 460, - F16SAMPLER2DRECTSHADOW = 461, - F16SAMPLERCUBESHADOW = 462, - F16SAMPLERCUBEARRAYSHADOW = 463, - IMAGE1D = 464, - IIMAGE1D = 465, - UIMAGE1D = 466, - IMAGE2D = 467, - IIMAGE2D = 468, - UIMAGE2D = 469, - IMAGE3D = 470, - IIMAGE3D = 471, - UIMAGE3D = 472, - IMAGE2DRECT = 473, - IIMAGE2DRECT = 474, - UIMAGE2DRECT = 475, - IMAGECUBE = 476, - IIMAGECUBE = 477, - UIMAGECUBE = 478, - IMAGEBUFFER = 479, - IIMAGEBUFFER = 480, - UIMAGEBUFFER = 481, - IMAGE1DARRAY = 482, - IIMAGE1DARRAY = 483, - UIMAGE1DARRAY = 484, - IMAGE2DARRAY = 485, - IIMAGE2DARRAY = 486, - UIMAGE2DARRAY = 487, - IMAGECUBEARRAY = 488, - IIMAGECUBEARRAY = 489, - UIMAGECUBEARRAY = 490, - IMAGE2DMS = 491, - IIMAGE2DMS = 492, - UIMAGE2DMS = 493, - IMAGE2DMSARRAY = 494, - IIMAGE2DMSARRAY = 495, - UIMAGE2DMSARRAY = 496, - F16IMAGE1D = 497, - F16IMAGE2D = 498, - F16IMAGE3D = 499, - F16IMAGE2DRECT = 500, - F16IMAGECUBE = 501, - F16IMAGE1DARRAY = 502, - F16IMAGE2DARRAY = 503, - F16IMAGECUBEARRAY = 504, - F16IMAGEBUFFER = 505, - F16IMAGE2DMS = 506, - F16IMAGE2DMSARRAY = 507, - TEXTURECUBEARRAY = 508, - ITEXTURECUBEARRAY = 509, - UTEXTURECUBEARRAY = 510, - TEXTURE1D = 511, - ITEXTURE1D = 512, - UTEXTURE1D = 513, - TEXTURE1DARRAY = 514, - ITEXTURE1DARRAY = 515, - UTEXTURE1DARRAY = 516, - TEXTURE2DRECT = 517, - ITEXTURE2DRECT = 518, - UTEXTURE2DRECT = 519, - TEXTUREBUFFER = 520, - ITEXTUREBUFFER = 521, - UTEXTUREBUFFER = 522, - TEXTURE2DMS = 523, - ITEXTURE2DMS = 524, - UTEXTURE2DMS = 525, - TEXTURE2DMSARRAY = 526, - ITEXTURE2DMSARRAY = 527, - UTEXTURE2DMSARRAY = 528, - F16TEXTURE1D = 529, - F16TEXTURE2D = 530, - F16TEXTURE3D = 531, - 
F16TEXTURE2DRECT = 532, - F16TEXTURECUBE = 533, - F16TEXTURE1DARRAY = 534, - F16TEXTURE2DARRAY = 535, - F16TEXTURECUBEARRAY = 536, - F16TEXTUREBUFFER = 537, - F16TEXTURE2DMS = 538, - F16TEXTURE2DMSARRAY = 539, - SUBPASSINPUT = 540, - SUBPASSINPUTMS = 541, - ISUBPASSINPUT = 542, - ISUBPASSINPUTMS = 543, - USUBPASSINPUT = 544, - USUBPASSINPUTMS = 545, - F16SUBPASSINPUT = 546, - F16SUBPASSINPUTMS = 547, - LEFT_OP = 548, - RIGHT_OP = 549, - INC_OP = 550, - DEC_OP = 551, - LE_OP = 552, - GE_OP = 553, - EQ_OP = 554, - NE_OP = 555, - AND_OP = 556, - OR_OP = 557, - XOR_OP = 558, - MUL_ASSIGN = 559, - DIV_ASSIGN = 560, - ADD_ASSIGN = 561, - MOD_ASSIGN = 562, - LEFT_ASSIGN = 563, - RIGHT_ASSIGN = 564, - AND_ASSIGN = 565, - XOR_ASSIGN = 566, - OR_ASSIGN = 567, - SUB_ASSIGN = 568, - LEFT_PAREN = 569, - RIGHT_PAREN = 570, - LEFT_BRACKET = 571, - RIGHT_BRACKET = 572, - LEFT_BRACE = 573, - RIGHT_BRACE = 574, - DOT = 575, - COMMA = 576, - COLON = 577, - EQUAL = 578, - SEMICOLON = 579, - BANG = 580, - DASH = 581, - TILDE = 582, - PLUS = 583, - STAR = 584, - SLASH = 585, - PERCENT = 586, - LEFT_ANGLE = 587, - RIGHT_ANGLE = 588, - VERTICAL_BAR = 589, - CARET = 590, - AMPERSAND = 591, - QUESTION = 592, - INVARIANT = 593, - HIGH_PRECISION = 594, - MEDIUM_PRECISION = 595, - LOW_PRECISION = 596, - PRECISION = 597, - PACKED = 598, - RESOURCE = 599, - SUPERP = 600, - FLOATCONSTANT = 601, - INTCONSTANT = 602, - UINTCONSTANT = 603, - BOOLCONSTANT = 604, - IDENTIFIER = 605, - TYPE_NAME = 606, - CENTROID = 607, - IN = 608, - OUT = 609, - INOUT = 610, - STRUCT = 611, - VOID = 612, - WHILE = 613, - BREAK = 614, - CONTINUE = 615, - DO = 616, - ELSE = 617, - FOR = 618, - IF = 619, - DISCARD = 620, - RETURN = 621, - SWITCH = 622, - CASE = 623, - DEFAULT = 624, - UNIFORM = 625, - SHARED = 626, - BUFFER = 627, - FLAT = 628, - SMOOTH = 629, - LAYOUT = 630, - DOUBLECONSTANT = 631, - INT16CONSTANT = 632, - UINT16CONSTANT = 633, - FLOAT16CONSTANT = 634, - INT32CONSTANT = 635, - UINT32CONSTANT = 636, - INT64CONSTANT = 637, - UINT64CONSTANT = 638, - SUBROUTINE = 639, - DEMOTE = 640, - PAYLOADNV = 641, - PAYLOADINNV = 642, - HITATTRNV = 643, - CALLDATANV = 644, - CALLDATAINNV = 645, - PATCH = 646, - SAMPLE = 647, - NONUNIFORM = 648, - COHERENT = 649, - VOLATILE = 650, - RESTRICT = 651, - READONLY = 652, - WRITEONLY = 653, - DEVICECOHERENT = 654, - QUEUEFAMILYCOHERENT = 655, - WORKGROUPCOHERENT = 656, - SUBGROUPCOHERENT = 657, - NONPRIVATE = 658, - NOPERSPECTIVE = 659, - EXPLICITINTERPAMD = 660, - PERVERTEXNV = 661, - PERPRIMITIVENV = 662, - PERVIEWNV = 663, - PERTASKNV = 664, - PRECISE = 665 + ACCSTRUCTEXT = 416, + RAYQUERYEXT = 417, + FCOOPMATNV = 418, + ICOOPMATNV = 419, + UCOOPMATNV = 420, + SAMPLERCUBEARRAY = 421, + SAMPLERCUBEARRAYSHADOW = 422, + ISAMPLERCUBEARRAY = 423, + USAMPLERCUBEARRAY = 424, + SAMPLER1D = 425, + SAMPLER1DARRAY = 426, + SAMPLER1DARRAYSHADOW = 427, + ISAMPLER1D = 428, + SAMPLER1DSHADOW = 429, + SAMPLER2DRECT = 430, + SAMPLER2DRECTSHADOW = 431, + ISAMPLER2DRECT = 432, + USAMPLER2DRECT = 433, + SAMPLERBUFFER = 434, + ISAMPLERBUFFER = 435, + USAMPLERBUFFER = 436, + SAMPLER2DMS = 437, + ISAMPLER2DMS = 438, + USAMPLER2DMS = 439, + SAMPLER2DMSARRAY = 440, + ISAMPLER2DMSARRAY = 441, + USAMPLER2DMSARRAY = 442, + SAMPLEREXTERNALOES = 443, + SAMPLEREXTERNAL2DY2YEXT = 444, + ISAMPLER1DARRAY = 445, + USAMPLER1D = 446, + USAMPLER1DARRAY = 447, + F16SAMPLER1D = 448, + F16SAMPLER2D = 449, + F16SAMPLER3D = 450, + F16SAMPLER2DRECT = 451, + F16SAMPLERCUBE = 452, + F16SAMPLER1DARRAY = 453, + F16SAMPLER2DARRAY = 454, + 
F16SAMPLERCUBEARRAY = 455, + F16SAMPLERBUFFER = 456, + F16SAMPLER2DMS = 457, + F16SAMPLER2DMSARRAY = 458, + F16SAMPLER1DSHADOW = 459, + F16SAMPLER2DSHADOW = 460, + F16SAMPLER1DARRAYSHADOW = 461, + F16SAMPLER2DARRAYSHADOW = 462, + F16SAMPLER2DRECTSHADOW = 463, + F16SAMPLERCUBESHADOW = 464, + F16SAMPLERCUBEARRAYSHADOW = 465, + IMAGE1D = 466, + IIMAGE1D = 467, + UIMAGE1D = 468, + IMAGE2D = 469, + IIMAGE2D = 470, + UIMAGE2D = 471, + IMAGE3D = 472, + IIMAGE3D = 473, + UIMAGE3D = 474, + IMAGE2DRECT = 475, + IIMAGE2DRECT = 476, + UIMAGE2DRECT = 477, + IMAGECUBE = 478, + IIMAGECUBE = 479, + UIMAGECUBE = 480, + IMAGEBUFFER = 481, + IIMAGEBUFFER = 482, + UIMAGEBUFFER = 483, + IMAGE1DARRAY = 484, + IIMAGE1DARRAY = 485, + UIMAGE1DARRAY = 486, + IMAGE2DARRAY = 487, + IIMAGE2DARRAY = 488, + UIMAGE2DARRAY = 489, + IMAGECUBEARRAY = 490, + IIMAGECUBEARRAY = 491, + UIMAGECUBEARRAY = 492, + IMAGE2DMS = 493, + IIMAGE2DMS = 494, + UIMAGE2DMS = 495, + IMAGE2DMSARRAY = 496, + IIMAGE2DMSARRAY = 497, + UIMAGE2DMSARRAY = 498, + F16IMAGE1D = 499, + F16IMAGE2D = 500, + F16IMAGE3D = 501, + F16IMAGE2DRECT = 502, + F16IMAGECUBE = 503, + F16IMAGE1DARRAY = 504, + F16IMAGE2DARRAY = 505, + F16IMAGECUBEARRAY = 506, + F16IMAGEBUFFER = 507, + F16IMAGE2DMS = 508, + F16IMAGE2DMSARRAY = 509, + TEXTURECUBEARRAY = 510, + ITEXTURECUBEARRAY = 511, + UTEXTURECUBEARRAY = 512, + TEXTURE1D = 513, + ITEXTURE1D = 514, + UTEXTURE1D = 515, + TEXTURE1DARRAY = 516, + ITEXTURE1DARRAY = 517, + UTEXTURE1DARRAY = 518, + TEXTURE2DRECT = 519, + ITEXTURE2DRECT = 520, + UTEXTURE2DRECT = 521, + TEXTUREBUFFER = 522, + ITEXTUREBUFFER = 523, + UTEXTUREBUFFER = 524, + TEXTURE2DMS = 525, + ITEXTURE2DMS = 526, + UTEXTURE2DMS = 527, + TEXTURE2DMSARRAY = 528, + ITEXTURE2DMSARRAY = 529, + UTEXTURE2DMSARRAY = 530, + F16TEXTURE1D = 531, + F16TEXTURE2D = 532, + F16TEXTURE3D = 533, + F16TEXTURE2DRECT = 534, + F16TEXTURECUBE = 535, + F16TEXTURE1DARRAY = 536, + F16TEXTURE2DARRAY = 537, + F16TEXTURECUBEARRAY = 538, + F16TEXTUREBUFFER = 539, + F16TEXTURE2DMS = 540, + F16TEXTURE2DMSARRAY = 541, + SUBPASSINPUT = 542, + SUBPASSINPUTMS = 543, + ISUBPASSINPUT = 544, + ISUBPASSINPUTMS = 545, + USUBPASSINPUT = 546, + USUBPASSINPUTMS = 547, + F16SUBPASSINPUT = 548, + F16SUBPASSINPUTMS = 549, + LEFT_OP = 550, + RIGHT_OP = 551, + INC_OP = 552, + DEC_OP = 553, + LE_OP = 554, + GE_OP = 555, + EQ_OP = 556, + NE_OP = 557, + AND_OP = 558, + OR_OP = 559, + XOR_OP = 560, + MUL_ASSIGN = 561, + DIV_ASSIGN = 562, + ADD_ASSIGN = 563, + MOD_ASSIGN = 564, + LEFT_ASSIGN = 565, + RIGHT_ASSIGN = 566, + AND_ASSIGN = 567, + XOR_ASSIGN = 568, + OR_ASSIGN = 569, + SUB_ASSIGN = 570, + STRING_LITERAL = 571, + LEFT_PAREN = 572, + RIGHT_PAREN = 573, + LEFT_BRACKET = 574, + RIGHT_BRACKET = 575, + LEFT_BRACE = 576, + RIGHT_BRACE = 577, + DOT = 578, + COMMA = 579, + COLON = 580, + EQUAL = 581, + SEMICOLON = 582, + BANG = 583, + DASH = 584, + TILDE = 585, + PLUS = 586, + STAR = 587, + SLASH = 588, + PERCENT = 589, + LEFT_ANGLE = 590, + RIGHT_ANGLE = 591, + VERTICAL_BAR = 592, + CARET = 593, + AMPERSAND = 594, + QUESTION = 595, + INVARIANT = 596, + HIGH_PRECISION = 597, + MEDIUM_PRECISION = 598, + LOW_PRECISION = 599, + PRECISION = 600, + PACKED = 601, + RESOURCE = 602, + SUPERP = 603, + FLOATCONSTANT = 604, + INTCONSTANT = 605, + UINTCONSTANT = 606, + BOOLCONSTANT = 607, + IDENTIFIER = 608, + TYPE_NAME = 609, + CENTROID = 610, + IN = 611, + OUT = 612, + INOUT = 613, + STRUCT = 614, + VOID = 615, + WHILE = 616, + BREAK = 617, + CONTINUE = 618, + DO = 619, + ELSE = 620, + FOR = 621, + IF = 622, + DISCARD = 
623, + RETURN = 624, + SWITCH = 625, + CASE = 626, + DEFAULT = 627, + UNIFORM = 628, + SHARED = 629, + BUFFER = 630, + FLAT = 631, + SMOOTH = 632, + LAYOUT = 633, + DOUBLECONSTANT = 634, + INT16CONSTANT = 635, + UINT16CONSTANT = 636, + FLOAT16CONSTANT = 637, + INT32CONSTANT = 638, + UINT32CONSTANT = 639, + INT64CONSTANT = 640, + UINT64CONSTANT = 641, + SUBROUTINE = 642, + DEMOTE = 643, + PAYLOADNV = 644, + PAYLOADINNV = 645, + HITATTRNV = 646, + CALLDATANV = 647, + CALLDATAINNV = 648, + PAYLOADEXT = 649, + PAYLOADINEXT = 650, + HITATTREXT = 651, + CALLDATAEXT = 652, + CALLDATAINEXT = 653, + PATCH = 654, + SAMPLE = 655, + NONUNIFORM = 656, + COHERENT = 657, + VOLATILE = 658, + RESTRICT = 659, + READONLY = 660, + WRITEONLY = 661, + DEVICECOHERENT = 662, + QUEUEFAMILYCOHERENT = 663, + WORKGROUPCOHERENT = 664, + SUBGROUPCOHERENT = 665, + NONPRIVATE = 666, + SHADERCALLCOHERENT = 667, + NOPERSPECTIVE = 668, + EXPLICITINTERPAMD = 669, + PERVERTEXNV = 670, + PERPRIMITIVENV = 671, + PERVIEWNV = 672, + PERTASKNV = 673, + PRECISE = 674 }; #endif @@ -539,7 +548,7 @@ extern int yydebug; union YYSTYPE { -#line 96 "MachineIndependent/glslang.y" /* yacc.c:355 */ +#line 97 "MachineIndependent/glslang.y" /* yacc.c:355 */ struct { glslang::TSourceLoc loc; @@ -575,7 +584,7 @@ union YYSTYPE glslang::TArraySizes* typeParameters; } interm; -#line 579 "MachineIndependent/glslang_tab.cpp" /* yacc.c:355 */ +#line 588 "MachineIndependent/glslang_tab.cpp" /* yacc.c:355 */ }; typedef union YYSTYPE YYSTYPE; @@ -590,7 +599,7 @@ int yyparse (glslang::TParseContext* pParseContext); #endif /* !YY_YY_MACHINEINDEPENDENT_GLSLANG_TAB_CPP_H_INCLUDED */ /* Copy the second part of user declarations. */ -#line 132 "MachineIndependent/glslang.y" /* yacc.c:358 */ +#line 133 "MachineIndependent/glslang.y" /* yacc.c:358 */ /* windows only pragma */ @@ -606,7 +615,7 @@ int yyparse (glslang::TParseContext* pParseContext); extern int yylex(YYSTYPE*, TParseContext&); -#line 610 "MachineIndependent/glslang_tab.cpp" /* yacc.c:358 */ +#line 619 "MachineIndependent/glslang_tab.cpp" /* yacc.c:358 */ #ifdef short # undef short @@ -846,23 +855,23 @@ union yyalloc #endif /* !YYCOPY_NEEDED */ /* YYFINAL -- State number of the termination state. */ -#define YYFINAL 386 +#define YYFINAL 394 /* YYLAST -- Last index in YYTABLE. */ -#define YYLAST 9369 +#define YYLAST 9550 /* YYNTOKENS -- Number of terminals. */ -#define YYNTOKENS 411 +#define YYNTOKENS 420 /* YYNNTS -- Number of nonterminals. */ #define YYNNTS 111 /* YYNRULES -- Number of rules. */ -#define YYNRULES 582 +#define YYNRULES 591 /* YYNSTATES -- Number of states. */ -#define YYNSTATES 727 +#define YYNSTATES 736 /* YYTRANSLATE[YYX] -- Symbol number corresponding to YYX as returned by yylex, with out-of-bounds checking. */ #define YYUNDEFTOK 2 -#define YYMAXUTOK 665 +#define YYMAXUTOK 674 #define YYTRANSLATE(YYX) \ ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK) @@ -937,72 +946,74 @@ static const yytype_uint16 yytranslate[] = 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, - 405, 406, 407, 408, 409, 410 + 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, + 415, 416, 417, 418, 419 }; #if YYDEBUG /* YYRLINE[YYN] -- Source line where rule number YYN was defined. 
*/ static const yytype_uint16 yyrline[] = { - 0, 352, 352, 358, 361, 366, 369, 372, 376, 380, - 384, 388, 392, 396, 400, 404, 408, 416, 419, 422, - 425, 428, 433, 441, 448, 455, 461, 465, 472, 475, - 481, 488, 498, 506, 511, 539, 548, 554, 558, 562, - 582, 583, 584, 585, 591, 592, 597, 602, 611, 612, - 617, 625, 626, 632, 641, 642, 647, 652, 657, 665, - 666, 675, 687, 688, 697, 698, 707, 708, 717, 718, - 726, 727, 735, 736, 744, 745, 745, 763, 764, 780, - 784, 788, 792, 797, 801, 805, 809, 813, 817, 821, - 828, 831, 842, 849, 854, 859, 866, 870, 874, 878, - 883, 888, 897, 897, 908, 912, 919, 926, 929, 936, - 944, 964, 987, 1002, 1027, 1038, 1048, 1058, 1068, 1077, - 1080, 1084, 1088, 1093, 1101, 1108, 1113, 1118, 1123, 1132, - 1142, 1169, 1178, 1185, 1193, 1200, 1207, 1215, 1225, 1232, - 1243, 1249, 1252, 1259, 1263, 1267, 1276, 1286, 1289, 1300, - 1303, 1306, 1310, 1314, 1319, 1323, 1330, 1334, 1339, 1345, - 1351, 1358, 1363, 1371, 1377, 1389, 1403, 1409, 1414, 1422, - 1430, 1438, 1446, 1453, 1457, 1462, 1467, 1472, 1477, 1482, - 1486, 1490, 1494, 1498, 1504, 1515, 1522, 1525, 1534, 1539, - 1549, 1554, 1562, 1566, 1576, 1579, 1585, 1591, 1598, 1608, - 1612, 1616, 1620, 1625, 1629, 1634, 1639, 1644, 1649, 1654, - 1659, 1664, 1669, 1674, 1680, 1686, 1692, 1697, 1702, 1707, - 1712, 1717, 1722, 1727, 1732, 1737, 1742, 1747, 1753, 1758, - 1763, 1768, 1773, 1778, 1783, 1788, 1793, 1798, 1803, 1808, - 1813, 1819, 1825, 1831, 1837, 1843, 1849, 1855, 1861, 1867, - 1873, 1879, 1885, 1891, 1897, 1903, 1909, 1915, 1921, 1927, - 1933, 1939, 1945, 1951, 1957, 1963, 1969, 1975, 1981, 1987, - 1993, 1999, 2005, 2011, 2017, 2023, 2029, 2035, 2041, 2047, - 2053, 2059, 2065, 2071, 2077, 2083, 2089, 2095, 2101, 2107, - 2113, 2119, 2125, 2131, 2137, 2143, 2149, 2155, 2161, 2167, - 2173, 2179, 2185, 2191, 2197, 2203, 2209, 2215, 2221, 2227, - 2233, 2239, 2245, 2251, 2257, 2263, 2269, 2275, 2281, 2287, - 2293, 2299, 2305, 2311, 2317, 2321, 2326, 2332, 2337, 2342, - 2347, 2352, 2357, 2362, 2368, 2373, 2378, 2383, 2388, 2393, - 2399, 2405, 2411, 2417, 2423, 2429, 2435, 2441, 2447, 2453, - 2459, 2465, 2471, 2477, 2482, 2487, 2492, 2497, 2502, 2507, - 2513, 2518, 2523, 2528, 2533, 2538, 2543, 2548, 2554, 2559, - 2564, 2569, 2574, 2579, 2584, 2589, 2594, 2599, 2604, 2609, - 2614, 2619, 2624, 2630, 2635, 2640, 2646, 2652, 2657, 2662, - 2667, 2673, 2678, 2683, 2688, 2694, 2699, 2704, 2709, 2715, - 2720, 2725, 2730, 2736, 2742, 2748, 2754, 2759, 2765, 2771, - 2777, 2782, 2787, 2792, 2797, 2802, 2808, 2813, 2818, 2823, - 2829, 2834, 2839, 2844, 2850, 2855, 2860, 2865, 2871, 2876, - 2881, 2886, 2892, 2897, 2902, 2907, 2913, 2918, 2923, 2928, - 2934, 2939, 2944, 2949, 2955, 2960, 2965, 2970, 2976, 2981, - 2986, 2991, 2997, 3002, 3007, 3012, 3018, 3023, 3028, 3033, - 3039, 3044, 3049, 3054, 3060, 3065, 3070, 3075, 3081, 3086, - 3091, 3096, 3102, 3107, 3112, 3118, 3124, 3130, 3136, 3143, - 3150, 3156, 3162, 3168, 3174, 3180, 3186, 3193, 3198, 3214, - 3219, 3224, 3232, 3232, 3243, 3243, 3253, 3256, 3269, 3291, - 3318, 3322, 3328, 3333, 3344, 3348, 3354, 3365, 3368, 3375, - 3379, 3380, 3386, 3387, 3388, 3389, 3390, 3391, 3392, 3394, - 3400, 3409, 3410, 3414, 3410, 3426, 3427, 3431, 3431, 3438, - 3438, 3452, 3455, 3463, 3471, 3482, 3483, 3487, 3491, 3498, - 3505, 3509, 3517, 3521, 3534, 3538, 3545, 3545, 3565, 3568, - 3574, 3586, 3598, 3602, 3609, 3609, 3624, 3624, 3640, 3640, - 3661, 3664, 3670, 3673, 3679, 3683, 3690, 3695, 3700, 3707, - 3710, 3719, 3723, 3732, 3735, 3739, 3748, 3748, 3771, 3777, - 3780, 3785, 3788 + 0, 357, 
357, 363, 366, 371, 374, 377, 381, 385, + 388, 392, 396, 400, 404, 408, 412, 418, 426, 429, + 432, 435, 438, 443, 451, 458, 465, 471, 475, 482, + 485, 491, 498, 508, 516, 521, 549, 558, 564, 568, + 572, 592, 593, 594, 595, 601, 602, 607, 612, 621, + 622, 627, 635, 636, 642, 651, 652, 657, 662, 667, + 675, 676, 685, 697, 698, 707, 708, 717, 718, 727, + 728, 736, 737, 745, 746, 754, 755, 755, 773, 774, + 790, 794, 798, 802, 807, 811, 815, 819, 823, 827, + 831, 838, 841, 852, 859, 864, 869, 876, 880, 884, + 888, 893, 898, 907, 907, 918, 922, 929, 936, 939, + 946, 954, 974, 997, 1012, 1037, 1048, 1058, 1068, 1078, + 1087, 1090, 1094, 1098, 1103, 1111, 1118, 1123, 1128, 1133, + 1142, 1152, 1179, 1188, 1195, 1203, 1210, 1217, 1225, 1235, + 1242, 1253, 1259, 1262, 1269, 1273, 1277, 1286, 1296, 1299, + 1310, 1313, 1316, 1320, 1324, 1329, 1333, 1340, 1344, 1349, + 1355, 1361, 1368, 1373, 1381, 1387, 1399, 1413, 1419, 1424, + 1432, 1440, 1448, 1456, 1464, 1472, 1480, 1488, 1495, 1502, + 1506, 1511, 1516, 1521, 1526, 1531, 1536, 1540, 1544, 1548, + 1552, 1558, 1569, 1576, 1579, 1588, 1593, 1603, 1608, 1616, + 1620, 1630, 1633, 1639, 1645, 1652, 1662, 1666, 1670, 1674, + 1679, 1683, 1688, 1693, 1698, 1703, 1708, 1713, 1718, 1723, + 1728, 1734, 1740, 1746, 1751, 1756, 1761, 1766, 1771, 1776, + 1781, 1786, 1791, 1796, 1801, 1807, 1814, 1819, 1824, 1829, + 1834, 1839, 1844, 1849, 1854, 1859, 1864, 1869, 1877, 1885, + 1893, 1899, 1905, 1911, 1917, 1923, 1929, 1935, 1941, 1947, + 1953, 1959, 1965, 1971, 1977, 1983, 1989, 1995, 2001, 2007, + 2013, 2019, 2025, 2031, 2037, 2043, 2049, 2055, 2061, 2067, + 2073, 2079, 2085, 2091, 2099, 2107, 2115, 2123, 2131, 2139, + 2147, 2155, 2163, 2171, 2179, 2187, 2193, 2199, 2205, 2211, + 2217, 2223, 2229, 2235, 2241, 2247, 2253, 2259, 2265, 2271, + 2277, 2283, 2289, 2295, 2301, 2307, 2313, 2319, 2325, 2331, + 2337, 2343, 2349, 2355, 2361, 2367, 2373, 2379, 2385, 2391, + 2397, 2403, 2407, 2411, 2415, 2420, 2426, 2431, 2436, 2441, + 2446, 2451, 2456, 2462, 2467, 2472, 2477, 2482, 2487, 2493, + 2499, 2505, 2511, 2517, 2523, 2529, 2535, 2541, 2547, 2553, + 2559, 2565, 2571, 2576, 2581, 2586, 2591, 2596, 2601, 2607, + 2612, 2617, 2622, 2627, 2632, 2637, 2642, 2648, 2653, 2658, + 2663, 2668, 2673, 2678, 2683, 2688, 2693, 2698, 2703, 2708, + 2713, 2718, 2724, 2729, 2734, 2740, 2746, 2751, 2756, 2761, + 2767, 2772, 2777, 2782, 2788, 2793, 2798, 2803, 2809, 2814, + 2819, 2824, 2830, 2836, 2842, 2848, 2853, 2859, 2865, 2871, + 2876, 2881, 2886, 2891, 2896, 2902, 2907, 2912, 2917, 2923, + 2928, 2933, 2938, 2944, 2949, 2954, 2959, 2965, 2970, 2975, + 2980, 2986, 2991, 2996, 3001, 3007, 3012, 3017, 3022, 3028, + 3033, 3038, 3043, 3049, 3054, 3059, 3064, 3070, 3075, 3080, + 3085, 3091, 3096, 3101, 3106, 3112, 3117, 3122, 3127, 3133, + 3138, 3143, 3148, 3154, 3159, 3164, 3169, 3175, 3180, 3185, + 3190, 3196, 3201, 3206, 3212, 3218, 3224, 3230, 3237, 3244, + 3250, 3256, 3262, 3268, 3274, 3280, 3287, 3292, 3308, 3313, + 3318, 3326, 3326, 3337, 3337, 3347, 3350, 3363, 3385, 3412, + 3416, 3422, 3427, 3438, 3442, 3448, 3459, 3462, 3469, 3473, + 3474, 3480, 3481, 3482, 3483, 3484, 3485, 3486, 3488, 3494, + 3503, 3504, 3508, 3504, 3520, 3521, 3525, 3525, 3532, 3532, + 3546, 3549, 3557, 3565, 3576, 3577, 3581, 3585, 3592, 3599, + 3603, 3611, 3615, 3628, 3632, 3639, 3639, 3659, 3662, 3668, + 3680, 3692, 3696, 3703, 3703, 3718, 3718, 3734, 3734, 3755, + 3758, 3764, 3767, 3773, 3777, 3784, 3789, 3794, 3801, 3804, + 3813, 3817, 3826, 3829, 3833, 3842, 3842, 3884, 3890, 3893, + 3898, 3901 }; #endif 
@@ -1039,16 +1050,17 @@ static const char *const yytname[] = "F32MAT3X2", "F32MAT3X3", "F32MAT3X4", "F32MAT4X2", "F32MAT4X3", "F32MAT4X4", "F64MAT2X2", "F64MAT2X3", "F64MAT2X4", "F64MAT3X2", "F64MAT3X3", "F64MAT3X4", "F64MAT4X2", "F64MAT4X3", "F64MAT4X4", - "ATOMIC_UINT", "ACCSTRUCTNV", "FCOOPMATNV", "ICOOPMATNV", "UCOOPMATNV", - "SAMPLERCUBEARRAY", "SAMPLERCUBEARRAYSHADOW", "ISAMPLERCUBEARRAY", - "USAMPLERCUBEARRAY", "SAMPLER1D", "SAMPLER1DARRAY", - "SAMPLER1DARRAYSHADOW", "ISAMPLER1D", "SAMPLER1DSHADOW", "SAMPLER2DRECT", - "SAMPLER2DRECTSHADOW", "ISAMPLER2DRECT", "USAMPLER2DRECT", - "SAMPLERBUFFER", "ISAMPLERBUFFER", "USAMPLERBUFFER", "SAMPLER2DMS", - "ISAMPLER2DMS", "USAMPLER2DMS", "SAMPLER2DMSARRAY", "ISAMPLER2DMSARRAY", - "USAMPLER2DMSARRAY", "SAMPLEREXTERNALOES", "SAMPLEREXTERNAL2DY2YEXT", - "ISAMPLER1DARRAY", "USAMPLER1D", "USAMPLER1DARRAY", "F16SAMPLER1D", - "F16SAMPLER2D", "F16SAMPLER3D", "F16SAMPLER2DRECT", "F16SAMPLERCUBE", + "ATOMIC_UINT", "ACCSTRUCTNV", "ACCSTRUCTEXT", "RAYQUERYEXT", + "FCOOPMATNV", "ICOOPMATNV", "UCOOPMATNV", "SAMPLERCUBEARRAY", + "SAMPLERCUBEARRAYSHADOW", "ISAMPLERCUBEARRAY", "USAMPLERCUBEARRAY", + "SAMPLER1D", "SAMPLER1DARRAY", "SAMPLER1DARRAYSHADOW", "ISAMPLER1D", + "SAMPLER1DSHADOW", "SAMPLER2DRECT", "SAMPLER2DRECTSHADOW", + "ISAMPLER2DRECT", "USAMPLER2DRECT", "SAMPLERBUFFER", "ISAMPLERBUFFER", + "USAMPLERBUFFER", "SAMPLER2DMS", "ISAMPLER2DMS", "USAMPLER2DMS", + "SAMPLER2DMSARRAY", "ISAMPLER2DMSARRAY", "USAMPLER2DMSARRAY", + "SAMPLEREXTERNALOES", "SAMPLEREXTERNAL2DY2YEXT", "ISAMPLER1DARRAY", + "USAMPLER1D", "USAMPLER1DARRAY", "F16SAMPLER1D", "F16SAMPLER2D", + "F16SAMPLER3D", "F16SAMPLER2DRECT", "F16SAMPLERCUBE", "F16SAMPLER1DARRAY", "F16SAMPLER2DARRAY", "F16SAMPLERCUBEARRAY", "F16SAMPLERBUFFER", "F16SAMPLER2DMS", "F16SAMPLER2DMSARRAY", "F16SAMPLER1DSHADOW", "F16SAMPLER2DSHADOW", "F16SAMPLER1DARRAYSHADOW", @@ -1078,28 +1090,30 @@ static const char *const yytname[] = "F16SUBPASSINPUTMS", "LEFT_OP", "RIGHT_OP", "INC_OP", "DEC_OP", "LE_OP", "GE_OP", "EQ_OP", "NE_OP", "AND_OP", "OR_OP", "XOR_OP", "MUL_ASSIGN", "DIV_ASSIGN", "ADD_ASSIGN", "MOD_ASSIGN", "LEFT_ASSIGN", "RIGHT_ASSIGN", - "AND_ASSIGN", "XOR_ASSIGN", "OR_ASSIGN", "SUB_ASSIGN", "LEFT_PAREN", - "RIGHT_PAREN", "LEFT_BRACKET", "RIGHT_BRACKET", "LEFT_BRACE", - "RIGHT_BRACE", "DOT", "COMMA", "COLON", "EQUAL", "SEMICOLON", "BANG", - "DASH", "TILDE", "PLUS", "STAR", "SLASH", "PERCENT", "LEFT_ANGLE", - "RIGHT_ANGLE", "VERTICAL_BAR", "CARET", "AMPERSAND", "QUESTION", - "INVARIANT", "HIGH_PRECISION", "MEDIUM_PRECISION", "LOW_PRECISION", - "PRECISION", "PACKED", "RESOURCE", "SUPERP", "FLOATCONSTANT", - "INTCONSTANT", "UINTCONSTANT", "BOOLCONSTANT", "IDENTIFIER", "TYPE_NAME", - "CENTROID", "IN", "OUT", "INOUT", "STRUCT", "VOID", "WHILE", "BREAK", - "CONTINUE", "DO", "ELSE", "FOR", "IF", "DISCARD", "RETURN", "SWITCH", - "CASE", "DEFAULT", "UNIFORM", "SHARED", "BUFFER", "FLAT", "SMOOTH", - "LAYOUT", "DOUBLECONSTANT", "INT16CONSTANT", "UINT16CONSTANT", - "FLOAT16CONSTANT", "INT32CONSTANT", "UINT32CONSTANT", "INT64CONSTANT", - "UINT64CONSTANT", "SUBROUTINE", "DEMOTE", "PAYLOADNV", "PAYLOADINNV", - "HITATTRNV", "CALLDATANV", "CALLDATAINNV", "PATCH", "SAMPLE", - "NONUNIFORM", "COHERENT", "VOLATILE", "RESTRICT", "READONLY", + "AND_ASSIGN", "XOR_ASSIGN", "OR_ASSIGN", "SUB_ASSIGN", "STRING_LITERAL", + "LEFT_PAREN", "RIGHT_PAREN", "LEFT_BRACKET", "RIGHT_BRACKET", + "LEFT_BRACE", "RIGHT_BRACE", "DOT", "COMMA", "COLON", "EQUAL", + "SEMICOLON", "BANG", "DASH", "TILDE", "PLUS", "STAR", "SLASH", "PERCENT", + "LEFT_ANGLE", 
"RIGHT_ANGLE", "VERTICAL_BAR", "CARET", "AMPERSAND", + "QUESTION", "INVARIANT", "HIGH_PRECISION", "MEDIUM_PRECISION", + "LOW_PRECISION", "PRECISION", "PACKED", "RESOURCE", "SUPERP", + "FLOATCONSTANT", "INTCONSTANT", "UINTCONSTANT", "BOOLCONSTANT", + "IDENTIFIER", "TYPE_NAME", "CENTROID", "IN", "OUT", "INOUT", "STRUCT", + "VOID", "WHILE", "BREAK", "CONTINUE", "DO", "ELSE", "FOR", "IF", + "DISCARD", "RETURN", "SWITCH", "CASE", "DEFAULT", "UNIFORM", "SHARED", + "BUFFER", "FLAT", "SMOOTH", "LAYOUT", "DOUBLECONSTANT", "INT16CONSTANT", + "UINT16CONSTANT", "FLOAT16CONSTANT", "INT32CONSTANT", "UINT32CONSTANT", + "INT64CONSTANT", "UINT64CONSTANT", "SUBROUTINE", "DEMOTE", "PAYLOADNV", + "PAYLOADINNV", "HITATTRNV", "CALLDATANV", "CALLDATAINNV", "PAYLOADEXT", + "PAYLOADINEXT", "HITATTREXT", "CALLDATAEXT", "CALLDATAINEXT", "PATCH", + "SAMPLE", "NONUNIFORM", "COHERENT", "VOLATILE", "RESTRICT", "READONLY", "WRITEONLY", "DEVICECOHERENT", "QUEUEFAMILYCOHERENT", - "WORKGROUPCOHERENT", "SUBGROUPCOHERENT", "NONPRIVATE", "NOPERSPECTIVE", - "EXPLICITINTERPAMD", "PERVERTEXNV", "PERPRIMITIVENV", "PERVIEWNV", - "PERTASKNV", "PRECISE", "$accept", "variable_identifier", - "primary_expression", "postfix_expression", "integer_expression", - "function_call", "function_call_or_method", "function_call_generic", + "WORKGROUPCOHERENT", "SUBGROUPCOHERENT", "NONPRIVATE", + "SHADERCALLCOHERENT", "NOPERSPECTIVE", "EXPLICITINTERPAMD", + "PERVERTEXNV", "PERPRIMITIVENV", "PERVIEWNV", "PERTASKNV", "PRECISE", + "$accept", "variable_identifier", "primary_expression", + "postfix_expression", "integer_expression", "function_call", + "function_call_or_method", "function_call_generic", "function_call_header_no_parameters", "function_call_header_with_parameters", "function_call_header", "function_identifier", "unary_expression", "unary_operator", @@ -1185,16 +1199,16 @@ static const yytype_uint16 yytoknum[] = 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, - 665 + 665, 666, 667, 668, 669, 670, 671, 672, 673, 674 }; # endif -#define YYPACT_NINF -453 +#define YYPACT_NINF -457 #define yypact_value_is_default(Yystate) \ - (!!((Yystate) == (-453))) + (!!((Yystate) == (-457))) -#define YYTABLE_NINF -528 +#define YYTABLE_NINF -537 #define yytable_value_is_error(Yytable_value) \ 0 @@ -1203,79 +1217,80 @@ static const yytype_uint16 yytoknum[] = STATE-NUM. 
*/ static const yytype_int16 yypact[] = { - 3994, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, 97, -453, -453, -453, - -453, -453, 6, -453, -453, -453, -453, -453, -453, -307, - -241, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -3, 95, 36, - 125, 6034, 82, -453, -22, -453, -453, -453, -453, 4402, - -453, -453, -453, -453, 131, -453, -453, 730, -453, -453, - 11, -453, 153, -28, 127, -453, 7, -453, 157, -453, - 6034, -453, -453, -453, 6034, 129, 134, -453, 13, -453, - 73, -453, -453, 8391, 162, -453, -453, -453, 161, 6034, - -453, 163, -453, -309, -453, -453, 27, 6831, -453, 16, - 1138, -453, -453, -453, -453, 162, 23, -453, 7221, 49, - -453, 138, -453, 87, 8391, 8391, 8391, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, -453, 68, -453, -453, -453, - 174, 60, 8781, 176, -453, 8391, -453, -453, -320, 175, - -453, 6034, 142, 4810, -453, 6034, 8391, -453, -28, -453, - 143, -453, -453, 119, 128, 32, 21, 38, 158, 160, - 165, 195, 194, 18, 183, 7611, -453, 185, 184, -453, - -453, 188, 180, 181, -453, 196, 197, 190, 8001, 198, - 8391, 187, 193, 122, -453, -453, 91, -453, 95, 204, - 205, -453, -453, -453, -453, -453, 1546, -453, -453, -453, - -453, -453, -453, -453, -453, -453, -353, 175, 7221, 69, - 7221, -453, -453, 7221, 6034, -453, 170, -453, -453, -453, - 78, -453, -453, 8391, 171, -453, -453, 8391, 207, -453, - -453, -453, 8391, -453, 142, 162, 93, -453, -453, -453, - 5218, -453, -453, -453, -453, 8391, 8391, 8391, 8391, 8391, - 8391, 8391, 8391, 8391, 8391, 8391, 8391, 8391, 8391, 8391, - 8391, 8391, 8391, 8391, -453, -453, -453, 206, 177, -453, - 1954, -453, -453, 
-453, 1954, -453, 8391, -453, -453, 100, - 8391, 144, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, -453, -453, -453, 8391, 8391, -453, -453, -453, - -453, -453, -453, -453, 7221, -453, 140, -453, 5626, -453, - -453, 209, 208, -453, -453, -453, 123, 175, 142, -453, - -453, -453, -453, -453, 119, 119, 128, 128, 32, 32, - 32, 32, 21, 21, 38, 158, 160, 165, 195, 194, - 8391, -453, 214, 56, -453, 1954, 3586, 172, 3178, 80, - -453, 81, -453, -453, -453, -453, -453, 6441, -453, -453, - -453, -453, 146, 8391, 215, 177, 212, 208, 186, 6034, - 219, 221, -453, -453, 3586, 220, -453, -453, -453, 8391, - 222, -453, -453, -453, 216, 2362, 8391, -453, 217, 227, - 182, 225, 2770, -453, 229, -453, -453, 7221, -453, -453, - -453, 89, 8391, 2362, 220, -453, -453, 1954, -453, 224, - 208, -453, -453, 1954, 226, -453, -453 + 4075, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, 132, -457, + -457, -457, -457, -457, -1, -457, -457, -457, -457, -457, + -457, -301, -298, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, 11, -249, 17, 30, 6160, + 20, -457, 28, -457, -457, -457, -457, 4492, -457, -457, + -457, -457, 50, -457, -457, 739, -457, -457, 16, -457, + 81, -29, 69, -457, -313, -457, 111, -457, 6160, -457, + -457, -457, 6160, 103, 106, -457, -314, -457, 72, -457, + -457, 8566, 142, -457, -457, -457, 136, 6160, -457, 144, + -457, 53, -457, -457, 76, 6974, -457, -312, 1156, -457, + -457, -457, -457, 142, -309, -457, 7372, -308, -457, 119, + -457, 65, 8566, 8566, -457, 8566, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, -457, 36, 
-457, -457, -457, 171, + 85, 8964, 173, -457, 8566, -457, -457, -323, 174, -457, + 6160, 139, 4909, -457, 6160, 8566, -457, -29, -457, 141, + -457, -457, 145, 99, 35, 26, 71, 156, 159, 161, + 196, 195, 23, 181, 7770, -457, 183, 182, -457, -457, + 186, 179, 180, -457, 191, 192, 187, 8168, 193, 8566, + 188, 189, 127, -457, -457, 96, -457, -249, 200, 201, + -457, -457, -457, -457, -457, 1573, -457, -457, -457, -457, + -457, -457, -457, -457, -457, -24, 174, 7372, 13, 7372, + -457, -457, 7372, 6160, -457, 166, -457, -457, -457, 86, + -457, -457, 8566, 168, -457, -457, 8566, 205, -457, -457, + -457, 8566, -457, 139, 142, 124, -457, -457, -457, 5326, + -457, -457, -457, -457, 8566, 8566, 8566, 8566, 8566, 8566, + 8566, 8566, 8566, 8566, 8566, 8566, 8566, 8566, 8566, 8566, + 8566, 8566, 8566, -457, -457, -457, 206, 172, -457, 1990, + -457, -457, -457, 1990, -457, 8566, -457, -457, 130, 8566, + 125, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, -457, -457, 8566, 8566, -457, -457, -457, -457, + -457, -457, -457, 7372, -457, 94, -457, 5743, -457, -457, + 207, 204, -457, -457, -457, 131, 174, 139, -457, -457, + -457, -457, -457, 145, 145, 99, 99, 35, 35, 35, + 35, 26, 26, 71, 156, 159, 161, 196, 195, 8566, + -457, 212, 60, -457, 1990, 3658, 169, 3241, 87, -457, + 89, -457, -457, -457, -457, -457, 6576, -457, -457, -457, + -457, 143, 8566, 211, 172, 210, 204, 184, 6160, 217, + 219, -457, -457, 3658, 218, -457, -457, -457, 8566, 220, + -457, -457, -457, 214, 2407, 8566, -457, 216, 223, 185, + 224, 2824, -457, 225, -457, -457, 7372, -457, -457, -457, + 97, 8566, 2407, 218, -457, -457, 1990, -457, 222, 204, + -457, -457, 1990, 229, -457, -457 }; /* YYDEFACT[STATE-NUM] -- Default reduction number in state STATE-NUM. @@ -1283,113 +1298,114 @@ static const yytype_int16 yypact[] = means the default is an error. 
*/ static const yytype_uint16 yydefact[] = { - 0, 156, 203, 201, 202, 200, 207, 208, 209, 210, - 211, 212, 213, 214, 215, 204, 205, 206, 216, 217, - 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, - 327, 328, 329, 330, 331, 332, 333, 353, 354, 355, - 356, 357, 358, 359, 368, 381, 382, 369, 370, 372, - 371, 373, 374, 375, 376, 377, 378, 379, 380, 164, - 165, 229, 230, 228, 231, 238, 239, 236, 237, 234, - 235, 232, 233, 261, 262, 263, 273, 274, 275, 258, - 259, 260, 270, 271, 272, 255, 256, 257, 267, 268, - 269, 252, 253, 254, 264, 265, 266, 240, 241, 242, - 276, 277, 278, 243, 244, 245, 288, 289, 290, 246, - 247, 248, 300, 301, 302, 249, 250, 251, 312, 313, - 314, 279, 280, 281, 282, 283, 284, 285, 286, 287, - 291, 292, 293, 294, 295, 296, 297, 298, 299, 303, - 304, 305, 306, 307, 308, 309, 310, 311, 315, 316, - 317, 318, 319, 320, 321, 322, 323, 325, 324, 484, - 485, 486, 337, 338, 361, 364, 326, 335, 336, 352, - 334, 383, 384, 387, 388, 389, 391, 392, 393, 395, - 396, 397, 399, 400, 474, 475, 360, 362, 363, 339, - 340, 341, 385, 342, 346, 347, 350, 390, 394, 398, - 343, 344, 348, 349, 386, 345, 351, 430, 432, 433, - 434, 436, 437, 438, 440, 441, 442, 444, 445, 446, - 448, 449, 450, 452, 453, 454, 456, 457, 458, 460, - 461, 462, 464, 465, 466, 468, 469, 470, 472, 473, - 431, 435, 439, 443, 447, 455, 459, 463, 451, 467, - 471, 365, 366, 367, 401, 410, 412, 406, 411, 413, - 414, 416, 417, 418, 420, 421, 422, 424, 425, 426, - 428, 429, 402, 403, 404, 415, 405, 407, 408, 409, - 419, 423, 427, 476, 477, 480, 481, 482, 483, 478, - 479, 575, 131, 489, 490, 491, 0, 488, 160, 158, - 159, 157, 0, 199, 161, 162, 163, 133, 132, 0, - 183, 169, 170, 168, 171, 172, 166, 167, 185, 173, - 179, 180, 181, 182, 174, 175, 176, 177, 178, 134, - 135, 136, 137, 138, 139, 146, 574, 0, 576, 0, - 108, 107, 0, 119, 124, 153, 152, 150, 154, 0, - 147, 149, 155, 129, 195, 151, 487, 0, 571, 573, - 0, 494, 0, 0, 0, 96, 0, 93, 0, 106, - 0, 115, 109, 117, 0, 118, 0, 94, 125, 99, - 0, 148, 130, 0, 188, 194, 1, 572, 0, 0, - 492, 143, 145, 0, 141, 186, 0, 0, 97, 0, - 0, 577, 110, 114, 116, 112, 120, 111, 0, 126, - 102, 0, 100, 0, 0, 0, 0, 42, 41, 43, - 40, 5, 6, 7, 8, 2, 15, 13, 14, 16, - 9, 10, 11, 12, 3, 17, 36, 19, 24, 25, - 0, 0, 29, 0, 197, 0, 35, 33, 0, 189, - 95, 0, 0, 0, 496, 0, 0, 140, 0, 184, - 0, 190, 44, 48, 51, 54, 59, 62, 64, 66, - 68, 70, 72, 74, 0, 0, 98, 0, 522, 531, - 535, 0, 0, 0, 556, 0, 0, 0, 0, 0, - 0, 0, 0, 44, 77, 90, 0, 509, 0, 155, - 129, 512, 533, 511, 519, 510, 0, 513, 514, 537, - 515, 544, 516, 517, 552, 518, 0, 113, 0, 121, - 0, 504, 128, 0, 0, 104, 0, 101, 37, 38, - 0, 21, 22, 0, 0, 27, 26, 0, 199, 30, - 32, 39, 0, 196, 0, 502, 0, 500, 495, 497, - 0, 92, 144, 142, 187, 0, 0, 0, 0, 0, + 0, 157, 210, 208, 209, 207, 214, 215, 216, 217, + 218, 219, 220, 221, 222, 211, 212, 213, 223, 224, + 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, + 336, 337, 338, 339, 340, 341, 342, 362, 363, 364, + 365, 366, 367, 368, 377, 390, 391, 378, 379, 381, + 380, 382, 383, 384, 385, 386, 387, 388, 389, 165, + 166, 236, 237, 235, 238, 245, 246, 243, 244, 241, + 242, 239, 240, 268, 269, 270, 280, 281, 282, 265, + 266, 267, 277, 278, 279, 262, 263, 264, 274, 275, + 276, 259, 260, 261, 271, 272, 273, 247, 248, 249, + 283, 284, 285, 250, 251, 252, 295, 296, 297, 253, + 254, 255, 307, 308, 309, 256, 257, 258, 319, 320, + 321, 286, 287, 288, 289, 290, 291, 292, 293, 294, + 298, 299, 300, 301, 302, 303, 304, 305, 306, 310, + 311, 312, 313, 314, 315, 316, 317, 318, 322, 323, + 324, 325, 326, 327, 328, 329, 330, 334, 331, 
332, + 333, 493, 494, 495, 346, 347, 370, 373, 335, 344, + 345, 361, 343, 392, 393, 396, 397, 398, 400, 401, + 402, 404, 405, 406, 408, 409, 483, 484, 369, 371, + 372, 348, 349, 350, 394, 351, 355, 356, 359, 399, + 403, 407, 352, 353, 357, 358, 395, 354, 360, 439, + 441, 442, 443, 445, 446, 447, 449, 450, 451, 453, + 454, 455, 457, 458, 459, 461, 462, 463, 465, 466, + 467, 469, 470, 471, 473, 474, 475, 477, 478, 479, + 481, 482, 440, 444, 448, 452, 456, 464, 468, 472, + 460, 476, 480, 374, 375, 376, 410, 419, 421, 415, + 420, 422, 423, 425, 426, 427, 429, 430, 431, 433, + 434, 435, 437, 438, 411, 412, 413, 424, 414, 416, + 417, 418, 428, 432, 436, 485, 486, 489, 490, 491, + 492, 487, 488, 584, 132, 498, 499, 500, 0, 497, + 161, 159, 160, 158, 0, 206, 162, 163, 164, 134, + 133, 0, 190, 171, 173, 169, 175, 177, 172, 174, + 170, 176, 178, 167, 168, 192, 179, 186, 187, 188, + 189, 180, 181, 182, 183, 184, 185, 135, 136, 137, + 138, 139, 140, 147, 583, 0, 585, 0, 109, 108, + 0, 120, 125, 154, 153, 151, 155, 0, 148, 150, + 156, 130, 202, 152, 496, 0, 580, 582, 0, 503, + 0, 0, 0, 97, 0, 94, 0, 107, 0, 116, + 110, 118, 0, 119, 0, 95, 126, 100, 0, 149, + 131, 0, 195, 201, 1, 581, 0, 0, 501, 144, + 146, 0, 142, 193, 0, 0, 98, 0, 0, 586, + 111, 115, 117, 113, 121, 112, 0, 127, 103, 0, + 101, 0, 0, 0, 9, 0, 43, 42, 44, 41, + 5, 6, 7, 8, 2, 16, 14, 15, 17, 10, + 11, 12, 13, 3, 18, 37, 20, 25, 26, 0, + 0, 30, 0, 204, 0, 36, 34, 0, 196, 96, + 0, 0, 0, 505, 0, 0, 141, 0, 191, 0, + 197, 45, 49, 52, 55, 60, 63, 65, 67, 69, + 71, 73, 75, 0, 0, 99, 0, 531, 540, 544, + 0, 0, 0, 565, 0, 0, 0, 0, 0, 0, + 0, 0, 45, 78, 91, 0, 518, 0, 156, 130, + 521, 542, 520, 528, 519, 0, 522, 523, 546, 524, + 553, 525, 526, 561, 527, 0, 114, 0, 122, 0, + 513, 129, 0, 0, 105, 0, 102, 38, 39, 0, + 22, 23, 0, 0, 28, 27, 0, 206, 31, 33, + 40, 0, 203, 0, 511, 0, 509, 504, 506, 0, + 93, 145, 143, 194, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 75, 191, 192, 0, 0, 521, - 0, 554, 567, 566, 0, 558, 0, 570, 568, 0, - 0, 0, 551, 520, 80, 81, 83, 82, 85, 86, - 87, 88, 89, 84, 79, 0, 0, 536, 532, 534, - 538, 545, 553, 123, 0, 507, 0, 127, 0, 105, - 4, 0, 23, 20, 31, 198, 0, 503, 0, 498, - 493, 45, 46, 47, 50, 49, 52, 53, 57, 58, - 55, 56, 60, 61, 63, 65, 67, 69, 71, 73, - 0, 193, 581, 0, 579, 523, 0, 0, 0, 0, - 569, 0, 550, 78, 91, 122, 505, 0, 103, 18, - 499, 501, 0, 0, 0, 0, 0, 542, 0, 0, - 0, 0, 561, 560, 563, 529, 546, 506, 508, 0, - 0, 578, 580, 524, 0, 0, 0, 562, 0, 0, - 541, 0, 0, 539, 0, 76, 582, 0, 526, 555, - 525, 0, 564, 0, 529, 528, 530, 548, 543, 0, - 565, 559, 540, 549, 0, 557, 547 + 0, 0, 0, 76, 198, 199, 0, 0, 530, 0, + 563, 576, 575, 0, 567, 0, 579, 577, 0, 0, + 0, 560, 529, 81, 82, 84, 83, 86, 87, 88, + 89, 90, 85, 80, 0, 0, 545, 541, 543, 547, + 554, 562, 124, 0, 516, 0, 128, 0, 106, 4, + 0, 24, 21, 32, 205, 0, 512, 0, 507, 502, + 46, 47, 48, 51, 50, 53, 54, 58, 59, 56, + 57, 61, 62, 64, 66, 68, 70, 72, 74, 0, + 200, 590, 0, 588, 532, 0, 0, 0, 0, 578, + 0, 559, 79, 92, 123, 514, 0, 104, 19, 508, + 510, 0, 0, 0, 0, 0, 551, 0, 0, 0, + 0, 570, 569, 572, 538, 555, 515, 517, 0, 0, + 587, 589, 533, 0, 0, 0, 571, 0, 0, 550, + 0, 0, 548, 0, 77, 591, 0, 535, 564, 534, + 0, 573, 0, 538, 537, 539, 557, 552, 0, 574, + 568, 549, 558, 0, 566, 556 }; /* YYPGOTO[NTERM-NUM]. 
*/ static const yytype_int16 yypgoto[] = { - -453, -453, -453, -453, -453, -453, -453, -453, -453, -453, - -453, -453, 8696, -453, -89, -88, -122, -84, -19, -18, - -17, -16, -20, -15, -453, -85, -453, -98, -453, -110, - -119, 2, -453, -453, -453, 4, -453, -453, -453, 189, - 191, 192, -453, -453, -339, -453, -453, -453, -453, 98, - -453, -37, -44, -453, 9, -453, 0, -71, -453, -453, - -453, -453, 261, -453, -453, -453, -452, -137, 20, -68, - -209, -453, -96, -198, -326, -453, -136, -453, -453, -146, - -144, -453, -453, 200, -265, -87, -453, 57, -453, -112, - -453, 59, -453, -453, -453, -453, 61, -453, -453, -453, - -453, -453, -453, -453, -453, 228, -453, -453, -453, -453, - -99 + -457, -457, -457, -457, -457, -457, -457, -457, -457, -457, + -457, -457, 8868, -457, -87, -84, -127, -93, -33, -31, + -27, -25, -28, -26, -457, -86, -457, -103, -457, -111, + -125, 2, -457, -457, -457, 4, -457, -457, -457, 176, + 194, 178, -457, -457, -337, -457, -457, -457, -457, 95, + -457, -37, -46, -457, 9, -457, 0, -63, -457, -457, + -457, -457, 263, -457, -457, -457, -456, -140, 10, -73, + -211, -457, -102, -198, -321, -457, -144, -457, -457, -155, + -154, -457, -457, 198, -274, -97, -457, 46, -457, -118, + -457, 51, -457, -457, -457, -457, 52, -457, -457, -457, + -457, -457, -457, -457, -457, 213, -457, -457, -457, -457, + -105 }; /* YYDEFGOTO[NTERM-NUM]. */ static const yytype_int16 yydefgoto[] = { - -1, 434, 435, 436, 621, 437, 438, 439, 440, 441, - 442, 443, 493, 445, 463, 464, 465, 466, 467, 468, - 469, 470, 471, 472, 473, 494, 650, 495, 605, 496, - 552, 497, 337, 524, 413, 498, 339, 340, 341, 371, - 372, 373, 342, 343, 344, 345, 346, 347, 393, 394, - 348, 349, 350, 351, 446, 396, 447, 399, 384, 385, - 448, 354, 355, 356, 455, 389, 453, 454, 546, 547, - 522, 616, 501, 502, 503, 504, 505, 580, 676, 709, - 700, 701, 702, 710, 506, 507, 508, 509, 703, 680, - 510, 511, 704, 724, 512, 513, 514, 656, 584, 658, - 684, 698, 699, 515, 357, 358, 359, 368, 516, 653, - 654 + -1, 443, 444, 445, 630, 446, 447, 448, 449, 450, + 451, 452, 502, 454, 472, 473, 474, 475, 476, 477, + 478, 479, 480, 481, 482, 503, 659, 504, 614, 505, + 561, 506, 345, 533, 421, 507, 347, 348, 349, 379, + 380, 381, 350, 351, 352, 353, 354, 355, 401, 402, + 356, 357, 358, 359, 455, 404, 456, 407, 392, 393, + 457, 362, 363, 364, 464, 397, 462, 463, 555, 556, + 531, 625, 510, 511, 512, 513, 514, 589, 685, 718, + 709, 710, 711, 719, 515, 516, 517, 518, 712, 689, + 519, 520, 713, 733, 521, 522, 523, 665, 593, 667, + 693, 707, 708, 524, 365, 366, 367, 376, 525, 662, + 663 }; /* YYTABLE[YYPACT[STATE-NUM]] -- What to do in state STATE-NUM. If @@ -1397,161 +1413,122 @@ static const yytype_int16 yydefgoto[] = number is the opposite. If YYTABLE_NINF, syntax error. 
*/ static const yytype_int16 yytable[] = { - 353, 542, 336, 550, 338, 481, 457, 363, 484, 352, - 485, 486, 458, 543, 489, 2, 3, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 618, 364, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, - 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, - 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, - 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, - 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, - 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, - 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, - 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, - 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, - 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, - 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, - 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, - 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, - 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, - 287, 288, 289, 290, 374, 381, 530, 409, 609, 613, - 521, 615, 474, 449, 617, 655, 549, 678, 562, 563, - 573, 365, 391, 397, 361, 560, 561, 407, 378, 397, - 381, 398, 475, 374, 517, 519, 408, 566, 567, 397, - 476, 375, 459, 392, 539, 678, 518, 366, 460, 382, - 352, 369, 451, 564, 565, 574, 362, 353, 352, 336, - 388, 338, 297, 531, 532, 475, 352, 302, 303, 708, - 375, 551, 523, 674, 375, 536, 716, 675, 589, 352, - 591, 537, -34, 352, 533, 475, 657, 708, 534, 452, - 577, 410, 614, 620, 411, 685, 686, 412, 352, 606, - 500, 606, 606, 376, 719, 665, 377, 381, 526, 499, - 606, 527, 606, 549, 628, 607, 451, 629, 451, 367, - 521, 606, 521, 622, 660, 521, 594, 595, 596, 597, - 598, 599, 600, 601, 602, 603, 293, 294, 295, 624, - 638, 639, 640, 641, 628, 604, 370, 670, 555, 556, - 557, 544, 723, 452, 558, 452, 559, 609, 688, 666, - 352, 667, 352, 383, 352, 606, 662, 606, 689, 634, - 635, 390, 636, 637, 627, 400, 659, 395, 397, 405, - 661, 549, 642, 643, 406, 450, 456, 451, 525, 535, - 540, 475, 545, 554, 568, 569, 571, 572, 718, 570, - 575, 578, 581, 579, 582, 583, 500, 663, 664, 592, - 585, 586, 590, 451, 587, 499, 521, 593, -35, -33, - 619, 623, -28, 651, 452, 609, 669, 652, 673, 606, - 681, 693, 691, 352, 695, 696, 694, 706, -527, 707, - 672, 712, 713, 478, 714, 726, 677, 717, 725, 644, - 452, 645, 648, 646, 690, 647, 553, 360, 649, 352, - 671, 402, 682, 403, 626, 715, 404, 721, 401, 521, - 722, 683, 697, 610, 677, 611, 692, 612, 0, 0, - 500, 451, 0, 0, 500, 387, 711, 0, 551, 499, - 0, 705, 0, 499, 0, 0, 0, 0, 0, 0, - 0, 0, 720, 0, 0, 0, 0, 0, 0, 521, - 0, 0, 0, 0, 0, 0, 0, 0, 452, 679, - 0, 0, 0, 0, 0, 0, 0, 352, 0, 0, - 0, 0, 0, 0, 0, 381, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 679, 0, 0, - 0, 0, 0, 0, 0, 500, 500, 0, 500, 0, - 0, 0, 0, 0, 499, 499, 0, 499, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 382, - 0, 0, 0, 0, 500, 0, 0, 0, 352, 0, - 0, 0, 0, 499, 0, 500, 0, 0, 0, 0, - 0, 0, 500, 0, 499, 0, 0, 0, 0, 0, - 0, 499, 0, 500, 0, 0, 0, 500, 0, 0, - 0, 0, 499, 500, 0, 0, 499, 0, 0, 0, - 386, 0, 499, 1, 2, 3, 4, 5, 6, 7, - 8, 9, 10, 11, 
12, 13, 14, 15, 16, 17, - 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, - 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, - 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, - 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, - 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, - 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, - 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, - 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, - 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, - 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, - 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, - 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, - 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, - 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, - 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, - 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, - 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, - 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, - 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, - 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, - 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, - 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, - 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, - 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, - 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, - 288, 289, 290, 0, 0, 0, 0, 0, 0, 0, + 361, 551, 344, 415, 346, 405, 405, 484, 559, 360, + 405, 484, 416, 552, 406, 485, 371, 527, 532, 372, + 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, + 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, + 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, + 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, + 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, + 52, 53, 54, 55, 56, 57, 58, 627, 375, 61, + 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, + 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, + 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, + 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, + 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, + 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, + 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, + 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, + 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, + 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, + 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, + 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, + 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, + 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, + 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, + 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, + 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, + 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, + 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, + 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, + 292, 389, 382, 530, 539, 664, 622, 618, 624, 483, + 369, 626, 558, 417, 399, 571, 572, 582, 687, 458, + 569, 570, 484, 540, 541, 377, 389, 490, 373, 623, + 493, 382, 494, 495, 384, 400, 498, 385, 548, 383, + 526, 528, 370, -35, 378, 542, 687, 390, 360, 543, + 460, 573, 574, 583, 374, 361, 360, 344, 396, 346, + 299, 466, 575, 576, 360, 304, 305, 467, 383, 560, + 683, 386, 383, 717, 684, 391, 598, 360, 600, 535, + 725, 360, 536, 418, 468, 666, 419, 461, 586, 420, + 469, 717, 398, 545, 629, 694, 360, 695, 509, 546, + 615, 615, 674, 615, 389, 728, 675, 508, 676, 558, + 615, 615, 403, 616, 530, 460, 530, 460, 567, 530, + 568, 631, 408, 603, 604, 605, 606, 
607, 608, 609, + 610, 611, 612, 633, 647, 648, 649, 650, 637, 615, + 671, 638, 732, 613, 615, 637, 413, 669, 679, 414, + 553, 405, 461, 459, 461, 697, 618, 615, 698, 360, + 465, 360, 534, 360, 295, 296, 297, 564, 565, 566, + 643, 644, 651, 652, 668, 645, 646, 558, 670, 544, + 549, 636, 554, 484, 563, 577, 460, 578, 579, 580, + 581, 584, 587, 590, 588, 727, 591, 592, 594, 595, + 599, 672, 673, 601, 596, 509, 602, -36, -34, 628, + 530, 632, 460, -29, 508, 661, 660, 678, 615, 682, + 690, 700, 702, 461, 618, 704, 705, 703, 715, -536, + 716, 722, 360, 721, 653, 487, 726, 654, 681, 734, + 723, 735, 655, 657, 686, 656, 658, 699, 411, 461, + 412, 368, 562, 635, 680, 691, 724, 730, 360, 731, + 692, 619, 410, 530, 409, 706, 620, 621, 395, 701, + 0, 0, 686, 0, 0, 0, 0, 0, 0, 509, + 460, 0, 0, 509, 720, 714, 560, 0, 508, 0, + 0, 0, 508, 0, 0, 0, 0, 0, 0, 0, + 729, 0, 0, 530, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 461, 688, 0, + 0, 0, 0, 0, 0, 0, 360, 0, 0, 0, + 0, 0, 389, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 688, 0, 0, 0, + 0, 0, 0, 0, 509, 509, 0, 509, 0, 0, + 0, 0, 0, 508, 508, 0, 508, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 390, 0, + 0, 0, 0, 509, 0, 0, 0, 360, 0, 0, + 0, 0, 508, 0, 509, 0, 0, 0, 0, 0, + 0, 509, 0, 508, 0, 0, 0, 0, 0, 0, + 508, 0, 509, 0, 0, 0, 509, 0, 0, 0, + 0, 508, 509, 0, 0, 508, 0, 0, 0, 394, + 0, 508, 1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, + 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, + 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, + 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, + 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, + 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, + 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, + 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, + 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, + 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, + 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, + 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, + 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, + 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, + 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, + 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, + 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, + 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, + 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, + 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, + 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, + 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, + 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, + 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, + 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, + 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, + 289, 290, 291, 292, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 291, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 292, 293, - 294, 295, 296, 0, 0, 0, 0, 0, 0, 0, - 0, 297, 298, 299, 300, 301, 302, 303, 0, 0, + 0, 0, 0, 0, 0, 0, 293, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 304, 305, 306, 307, 308, 309, 0, 0, 0, 0, - 0, 0, 0, 0, 310, 0, 311, 312, 313, 314, + 294, 295, 296, 297, 298, 0, 0, 0, 0, 0, + 0, 0, 0, 299, 300, 301, 302, 303, 304, 305, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 306, 307, 308, 309, 310, 311, 0, 0, + 0, 0, 0, 0, 0, 0, 312, 0, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, - 335, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, - 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, - 
30, 31, 32, 33, 34, 35, 36, 37, 38, 39, - 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, - 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, - 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, - 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, - 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, - 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, - 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, - 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, - 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, - 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, - 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, - 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, - 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, - 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, - 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, - 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, - 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, - 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, - 290, 0, 0, 414, 415, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 416, 0, 477, 0, 478, 479, 0, 0, - 0, 0, 480, 417, 418, 419, 420, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 292, 293, 294, 295, - 296, 0, 0, 0, 421, 422, 423, 424, 425, 297, - 298, 299, 300, 301, 302, 303, 481, 482, 483, 484, - 0, 485, 486, 487, 488, 489, 490, 491, 304, 305, - 306, 307, 308, 309, 426, 427, 428, 429, 430, 431, - 432, 433, 310, 492, 311, 312, 313, 314, 315, 316, - 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, - 327, 328, 329, 330, 331, 332, 333, 334, 335, 1, + 335, 336, 337, 338, 339, 340, 341, 342, 343, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, @@ -1580,19 +1557,145 @@ static const yytype_int16 yytable[] = 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, - 282, 283, 284, 285, 286, 287, 288, 289, 290, 0, - 0, 414, 415, 0, 0, 0, 0, 0, 0, 0, + 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, + 292, 0, 0, 422, 423, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 416, 0, 477, 0, 478, 608, 0, 0, 0, 0, - 480, 417, 418, 419, 420, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 292, 293, 294, 295, 296, 0, - 0, 0, 421, 422, 423, 424, 425, 297, 298, 299, - 300, 301, 302, 303, 481, 482, 483, 484, 0, 485, - 486, 487, 488, 489, 490, 491, 304, 305, 306, 307, - 308, 309, 426, 427, 428, 429, 430, 431, 432, 433, - 310, 492, 311, 312, 313, 314, 315, 316, 317, 318, - 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, - 329, 330, 331, 332, 333, 334, 335, 1, 2, 3, + 0, 0, 424, 425, 0, 486, 0, 487, 488, 0, + 0, 0, 0, 489, 426, 427, 428, 429, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 294, 295, 296, + 297, 298, 0, 0, 0, 430, 431, 432, 433, 434, + 299, 300, 301, 302, 303, 304, 305, 490, 491, 492, + 493, 0, 494, 495, 496, 497, 498, 499, 500, 306, + 307, 308, 309, 310, 311, 435, 436, 437, 438, 439, + 440, 441, 442, 312, 501, 313, 314, 315, 316, 317, + 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, + 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, + 338, 339, 340, 341, 342, 343, 1, 2, 3, 4, + 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, + 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, + 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, + 35, 36, 37, 38, 39, 40, 
41, 42, 43, 44, + 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, + 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, + 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, + 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, + 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, + 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, + 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, + 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, + 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, + 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, + 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, + 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, + 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, + 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, + 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, + 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, + 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, + 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, + 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, + 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, + 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, + 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, + 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, + 285, 286, 287, 288, 289, 290, 291, 292, 0, 0, + 422, 423, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 424, + 425, 0, 486, 0, 487, 617, 0, 0, 0, 0, + 489, 426, 427, 428, 429, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 294, 295, 296, 297, 298, 0, + 0, 0, 430, 431, 432, 433, 434, 299, 300, 301, + 302, 303, 304, 305, 490, 491, 492, 493, 0, 494, + 495, 496, 497, 498, 499, 500, 306, 307, 308, 309, + 310, 311, 435, 436, 437, 438, 439, 440, 441, 442, + 312, 501, 313, 314, 315, 316, 317, 318, 319, 320, + 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, + 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, + 341, 342, 343, 1, 2, 3, 4, 5, 6, 7, + 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, + 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, + 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, + 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, + 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, + 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, + 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, + 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, + 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, + 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, + 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, + 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, + 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, + 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, + 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, + 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, + 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, + 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, + 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, + 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, + 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, + 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, + 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, + 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, + 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, + 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, + 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, + 288, 289, 290, 291, 292, 0, 0, 422, 423, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 424, 425, 0, 486, + 0, 487, 0, 0, 0, 0, 0, 489, 426, 427, + 428, 429, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 294, 295, 296, 297, 298, 0, 0, 0, 430, + 431, 432, 433, 434, 299, 300, 301, 302, 303, 304, + 305, 490, 491, 492, 493, 0, 494, 495, 496, 497, + 498, 
499, 500, 306, 307, 308, 309, 310, 311, 435, + 436, 437, 438, 439, 440, 441, 442, 312, 501, 313, + 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, + 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, + 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, + 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, + 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, + 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, + 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, + 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, + 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, + 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, + 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, + 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, + 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, + 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, + 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, + 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, + 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, + 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, + 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, + 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, + 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, + 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, + 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, + 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, + 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, + 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, + 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, + 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, + 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, + 291, 292, 0, 0, 422, 423, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 424, 425, 0, 486, 0, 408, 0, + 0, 0, 0, 0, 489, 426, 427, 428, 429, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 294, 295, + 296, 297, 298, 0, 0, 0, 430, 431, 432, 433, + 434, 299, 300, 301, 302, 303, 304, 305, 490, 491, + 492, 493, 0, 494, 495, 496, 497, 498, 499, 500, + 306, 307, 308, 309, 310, 311, 435, 436, 437, 438, + 439, 440, 441, 442, 312, 501, 313, 314, 315, 316, + 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, + 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, + 337, 338, 339, 340, 341, 342, 343, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, @@ -1621,19 +1724,145 @@ static const yytype_int16 yytable[] = 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, - 284, 285, 286, 287, 288, 289, 290, 0, 0, 414, - 415, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 416, 0, - 477, 0, 478, 0, 0, 0, 0, 0, 480, 417, - 418, 419, 420, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 292, 293, 294, 295, 296, 0, 0, 0, - 421, 422, 423, 424, 425, 297, 298, 299, 300, 301, - 302, 303, 481, 482, 483, 484, 0, 485, 486, 487, - 488, 489, 490, 491, 304, 305, 306, 307, 308, 309, - 426, 427, 428, 429, 430, 431, 432, 433, 310, 492, - 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, - 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, - 331, 332, 333, 334, 335, 1, 2, 3, 4, 5, + 284, 285, 286, 287, 288, 289, 290, 291, 292, 0, + 0, 422, 423, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 424, 425, 0, 486, 0, 0, 0, 0, 0, 0, + 0, 489, 426, 427, 428, 429, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 294, 295, 296, 297, 298, + 0, 0, 0, 430, 431, 432, 433, 434, 299, 300, + 301, 302, 303, 304, 305, 490, 491, 492, 493, 0, + 494, 495, 496, 497, 498, 499, 500, 306, 307, 308, + 309, 
310, 311, 435, 436, 437, 438, 439, 440, 441, + 442, 312, 501, 313, 314, 315, 316, 317, 318, 319, + 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, + 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, + 340, 341, 342, 343, 1, 2, 3, 4, 5, 6, + 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, + 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, + 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, + 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, + 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, + 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, + 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, + 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, + 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, + 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, + 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, + 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, + 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, + 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, + 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, + 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, + 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, + 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, + 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, + 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, + 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, + 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, + 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, + 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, + 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, + 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, + 287, 288, 289, 290, 291, 292, 0, 0, 422, 423, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 424, 425, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 489, 426, + 427, 428, 429, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 294, 295, 296, 297, 298, 0, 0, 0, + 430, 431, 432, 433, 434, 299, 300, 301, 302, 303, + 304, 305, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 306, 307, 308, 309, 310, 311, + 435, 436, 437, 438, 439, 440, 441, 442, 312, 0, + 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, + 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, + 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, + 343, 1, 2, 3, 4, 5, 6, 7, 8, 9, + 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, + 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, + 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, + 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, + 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, + 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, + 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, + 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, + 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, + 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, + 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, + 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, + 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, + 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, + 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, + 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, + 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, + 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, + 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, + 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, + 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, + 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, + 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, + 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, + 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, + 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, + 280, 281, 282, 283, 284, 285, 
286, 287, 288, 289, + 290, 291, 292, 0, 0, 422, 423, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 424, 425, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 426, 427, 428, 429, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 294, + 295, 296, 297, 0, 0, 0, 0, 430, 431, 432, + 433, 434, 299, 300, 301, 302, 303, 304, 305, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 306, 307, 308, 309, 310, 311, 435, 436, 437, + 438, 439, 440, 441, 442, 312, 0, 313, 314, 315, + 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, + 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, + 336, 337, 338, 339, 340, 341, 342, 343, 1, 2, + 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, + 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, + 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, + 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, + 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, + 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, + 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, + 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, + 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, + 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, + 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, + 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, + 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, + 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, + 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, + 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, + 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, + 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, + 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, + 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, + 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, + 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, + 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, + 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, + 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, + 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, + 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, + 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 293, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 294, 295, 296, 297, + 298, 0, 0, 0, 0, 0, 0, 0, 0, 299, + 300, 301, 302, 303, 304, 305, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 306, 307, + 308, 309, 310, 311, 0, 0, 0, 0, 0, 0, + 0, 0, 312, 0, 313, 314, 315, 316, 317, 318, + 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, + 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, + 339, 340, 341, 342, 343, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, @@ -1662,100 +1891,61 @@ static const yytype_int16 yytable[] = 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, - 286, 287, 288, 289, 290, 0, 0, 414, 415, 0, + 286, 287, 288, 289, 290, 291, 292, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 416, 0, 477, 0, - 400, 0, 0, 0, 0, 0, 480, 417, 418, 419, - 420, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 292, 293, 294, 295, 296, 0, 0, 0, 421, 422, - 423, 424, 425, 297, 298, 299, 300, 301, 302, 303, - 481, 482, 483, 484, 0, 485, 486, 487, 488, 489, - 490, 491, 304, 305, 306, 307, 308, 309, 426, 427, - 428, 429, 430, 431, 432, 433, 310, 492, 311, 312, - 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, - 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, - 333, 334, 335, 1, 2, 3, 4, 5, 6, 7, - 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, - 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, - 28, 29, 
30, 31, 32, 33, 34, 35, 36, 37, - 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, - 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, - 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, - 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, - 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, - 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, - 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, - 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, - 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, - 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, - 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, - 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, - 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, - 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, - 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, - 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, - 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, - 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, - 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, - 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, - 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, - 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, - 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, - 288, 289, 290, 0, 0, 414, 415, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 416, 0, 477, 0, 0, 0, - 0, 0, 0, 0, 480, 417, 418, 419, 420, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 292, 293, - 294, 295, 296, 0, 0, 0, 421, 422, 423, 424, - 425, 297, 298, 299, 300, 301, 302, 303, 481, 482, - 483, 484, 0, 485, 486, 487, 488, 489, 490, 491, - 304, 305, 306, 307, 308, 309, 426, 427, 428, 429, - 430, 431, 432, 433, 310, 492, 311, 312, 313, 314, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 387, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 294, 295, 296, 297, 0, 0, 0, + 0, 0, 0, 0, 0, 388, 299, 300, 301, 302, + 303, 304, 305, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 306, 307, 308, 309, 310, + 311, 0, 0, 0, 0, 0, 0, 0, 0, 312, + 0, 313, 314, 315, 316, 317, 318, 319, 320, 321, + 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, + 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, + 342, 343, 1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, + 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, + 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, + 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, + 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, + 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, + 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, + 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, + 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, + 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, + 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, + 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, + 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, + 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, + 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, + 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, + 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, + 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, + 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, + 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, + 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, + 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, + 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, + 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, + 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, + 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, + 289, 290, 291, 292, 0, 0, 
0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 557, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 294, 295, 296, 297, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 299, 300, 301, 302, 303, 304, 305, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 306, 307, 308, 309, 310, 311, 0, 0, + 0, 0, 0, 0, 0, 0, 312, 0, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, - 335, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, - 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, - 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, - 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, - 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, - 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, - 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, - 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, - 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, - 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, - 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, - 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, - 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, - 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, - 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, - 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, - 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, - 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, - 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, - 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, - 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, - 290, 0, 0, 414, 415, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 416, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 480, 417, 418, 419, 420, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 292, 293, 294, 295, - 296, 0, 0, 0, 421, 422, 423, 424, 425, 297, - 298, 299, 300, 301, 302, 303, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 304, 305, - 306, 307, 308, 309, 426, 427, 428, 429, 430, 431, - 432, 433, 310, 0, 311, 312, 313, 314, 315, 316, - 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, - 327, 328, 329, 330, 331, 332, 333, 334, 335, 1, + 335, 336, 337, 338, 339, 340, 341, 342, 343, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, @@ -1784,25 +1974,149 @@ static const yytype_int16 yytable[] = 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, - 282, 283, 284, 285, 286, 287, 288, 289, 290, 0, - 0, 414, 415, 0, 0, 0, 0, 0, 0, 0, + 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, + 292, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 416, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 417, 418, 419, 420, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 292, 293, 294, 295, 0, 0, - 0, 0, 421, 422, 423, 424, 425, 297, 298, 299, - 300, 301, 302, 303, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 304, 305, 306, 307, - 308, 309, 426, 427, 428, 429, 430, 431, 432, 433, - 310, 0, 311, 312, 313, 314, 315, 316, 317, 318, - 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, - 329, 330, 331, 332, 333, 334, 335, 1, 2, 3, + 0, 0, 0, 0, 0, 0, 0, 0, 639, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 294, 295, 296, + 297, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 299, 300, 301, 302, 303, 304, 305, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 306, + 307, 308, 309, 310, 311, 0, 0, 0, 0, 0, + 0, 0, 0, 312, 0, 313, 314, 315, 316, 317, + 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, + 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, + 338, 339, 340, 341, 342, 343, 1, 2, 3, 4, + 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, + 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, + 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, + 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, + 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, + 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, + 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, + 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, + 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, + 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, + 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, + 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, + 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, + 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, + 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, + 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, + 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, + 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, + 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, + 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, + 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, + 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, + 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, + 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, + 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, + 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, + 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, + 285, 286, 287, 288, 289, 290, 291, 292, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 677, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 294, 295, 296, 297, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 299, 300, 301, + 302, 303, 304, 305, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 306, 307, 308, 309, + 310, 311, 0, 0, 0, 0, 0, 0, 0, 0, + 312, 0, 313, 314, 315, 316, 317, 318, 319, 320, + 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, + 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, + 341, 342, 343, 1, 2, 3, 4, 5, 6, 7, + 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, + 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, + 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, + 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, + 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, + 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, + 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, + 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, + 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, + 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, + 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, + 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, + 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, + 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, + 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, + 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, + 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, + 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, + 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, + 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, + 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, + 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, + 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, + 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, + 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, + 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, + 278, 279, 280, 281, 282, 283, 284, 285, 286, 
287, + 288, 289, 290, 291, 292, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 294, 295, 296, 297, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 299, 300, 301, 302, 303, 304, + 305, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 306, 307, 308, 309, 310, 311, 0, + 0, 0, 0, 0, 0, 0, 0, 312, 0, 313, + 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, + 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, + 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, + 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, + 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, + 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, + 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, + 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, + 52, 53, 54, 55, 56, 57, 58, 0, 0, 61, + 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, + 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, + 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, + 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, + 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, + 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, + 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, + 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, + 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, + 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, + 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, + 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, + 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, + 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, + 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, + 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, + 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, + 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, + 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, + 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, + 292, 0, 0, 422, 423, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 424, 425, 0, 0, 0, 529, 696, 0, + 0, 0, 0, 0, 426, 427, 428, 429, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 430, 431, 432, 433, 434, + 299, 0, 0, 0, 0, 304, 305, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 435, 436, 437, 438, 439, + 440, 441, 442, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 325, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, - 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, + 54, 55, 56, 57, 58, 0, 0, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, @@ -1825,25 +2139,24 @@ static const yytype_int16 yytable[] = 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, - 284, 285, 286, 287, 288, 289, 290, 0, 0, 0, + 284, 285, 286, 287, 288, 289, 290, 291, 292, 0, + 0, 422, 423, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 424, 425, 0, 0, 470, 0, 0, 0, 0, 0, + 0, 0, 426, 427, 428, 429, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 291, 0, + 0, 0, 0, 430, 431, 432, 433, 434, 299, 0, + 0, 0, 0, 304, 305, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 292, 293, 294, 295, 296, 0, 0, 0, - 0, 0, 0, 0, 0, 297, 298, 299, 300, 301, - 302, 303, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 304, 305, 306, 307, 308, 309, - 0, 0, 0, 0, 0, 0, 0, 0, 310, 
0, - 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, - 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, - 331, 332, 333, 334, 335, 1, 2, 3, 4, 5, + 0, 0, 0, 435, 436, 437, 438, 439, 440, 441, + 442, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 325, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, - 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, + 56, 57, 58, 0, 0, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, @@ -1866,25 +2179,24 @@ static const yytype_int16 yytable[] = 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, - 286, 287, 288, 289, 290, 0, 0, 0, 0, 0, + 286, 287, 288, 289, 290, 291, 292, 0, 0, 422, + 423, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 424, 425, + 0, 0, 0, 529, 0, 0, 0, 0, 0, 0, + 426, 427, 428, 429, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 430, 431, 432, 433, 434, 299, 0, 0, 0, + 0, 304, 305, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 379, 0, 0, 0, + 0, 435, 436, 437, 438, 439, 440, 441, 442, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 292, 293, 294, 295, 0, 0, 0, 0, 0, 0, - 0, 0, 380, 297, 298, 299, 300, 301, 302, 303, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 304, 305, 306, 307, 308, 309, 0, 0, - 0, 0, 0, 0, 0, 0, 310, 0, 311, 312, - 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, - 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, - 333, 334, 335, 1, 2, 3, 4, 5, 6, 7, + 0, 0, 0, 325, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, - 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, + 58, 0, 0, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, @@ -1907,25 +2219,24 @@ static const yytype_int16 yytable[] = 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, - 288, 289, 290, 0, 0, 0, 0, 0, 0, 0, + 288, 289, 290, 291, 292, 0, 0, 422, 423, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 548, + 0, 0, 0, 0, 0, 0, 424, 425, 0, 0, + 585, 0, 0, 0, 0, 0, 0, 0, 426, 427, + 428, 429, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 430, + 431, 432, 433, 434, 299, 0, 0, 0, 0, 304, + 305, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 435, + 436, 437, 438, 439, 440, 441, 442, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 292, 293, - 294, 295, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 297, 298, 299, 300, 301, 302, 303, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 304, 305, 306, 307, 308, 309, 0, 0, 0, 0, - 0, 0, 0, 0, 310, 0, 311, 312, 313, 314, - 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, - 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, - 335, 1, 2, 3, 4, 5, 6, 7, 8, 9, + 0, 325, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, + 50, 51, 52, 53, 54, 55, 56, 57, 58, 0, + 0, 61, 62, 63, 64, 65, 66, 67, 
68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, @@ -1948,24 +2259,23 @@ static const yytype_int16 yytable[] = 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, - 290, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 290, 291, 292, 0, 0, 422, 423, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 630, 0, 0, + 0, 0, 0, 0, 424, 425, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 597, 426, 427, 428, 429, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 292, 293, 294, 295, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 297, - 298, 299, 300, 301, 302, 303, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 304, 305, - 306, 307, 308, 309, 0, 0, 0, 0, 0, 0, - 0, 0, 310, 0, 311, 312, 313, 314, 315, 316, - 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, - 327, 328, 329, 330, 331, 332, 333, 334, 335, 1, + 0, 0, 0, 0, 0, 0, 0, 430, 431, 432, + 433, 434, 299, 0, 0, 0, 0, 304, 305, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 435, 436, 437, + 438, 439, 440, 441, 442, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 325, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, + 52, 53, 54, 55, 56, 57, 58, 0, 0, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, @@ -1988,25 +2298,24 @@ static const yytype_int16 yytable[] = 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, - 282, 283, 284, 285, 286, 287, 288, 289, 290, 0, + 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, + 292, 0, 0, 422, 423, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 424, 425, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 426, 427, 428, 429, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 668, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 430, 431, 432, 433, 434, + 299, 0, 0, 0, 0, 304, 305, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 292, 293, 294, 295, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 297, 298, 299, - 300, 301, 302, 303, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 304, 305, 306, 307, - 308, 309, 0, 0, 0, 0, 0, 0, 0, 0, - 310, 0, 311, 312, 313, 314, 315, 316, 317, 318, - 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, - 329, 330, 331, 332, 333, 334, 335, 1, 2, 3, + 0, 0, 0, 0, 0, 435, 436, 437, 438, 439, + 440, 441, 442, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 325, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, - 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, + 54, 55, 56, 57, 58, 0, 0, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, @@ -2029,301 +2338,27 @@ static const yytype_int16 yytable[] = 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, - 284, 285, 286, 287, 288, 289, 290, 0, 0, 0, + 284, 285, 286, 287, 288, 289, 290, 291, 292, 453, + 0, 422, 423, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 471, 0, 0, 0, 0, 0, 0, + 424, 425, 0, 0, 0, 0, 0, 0, 0, 0, + 537, 538, 426, 427, 428, 429, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 430, 431, 432, 
433, 434, 299, 0, + 0, 0, 550, 304, 547, 0, 0, 0, 0, 0, + 0, 0, 0, 471, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 435, 436, 437, 438, 439, 440, 441, + 442, 0, 471, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 325, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 292, 293, 294, 295, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 297, 298, 299, 300, 301, - 302, 303, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 304, 305, 306, 307, 308, 309, - 0, 0, 0, 0, 0, 0, 0, 0, 310, 0, - 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, - 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, - 331, 332, 333, 334, 335, 2, 3, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 0, 0, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, - 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, - 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, - 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, - 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, - 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, - 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, - 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, - 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, - 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, - 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, - 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, - 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, - 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, - 287, 288, 289, 290, 0, 0, 414, 415, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 634, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 416, 0, 0, 0, 520, - 687, 0, 0, 0, 0, 0, 417, 418, 419, 420, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 421, 422, 423, - 424, 425, 297, 0, 0, 0, 0, 302, 303, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 426, 427, 428, - 429, 430, 431, 432, 433, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 318, 2, 3, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 0, 0, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, - 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, - 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, - 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, - 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, - 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, - 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, - 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, - 227, 228, 229, 230, 231, 232, 233, 
234, 235, 236, - 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, - 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, - 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, - 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, - 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, - 287, 288, 289, 290, 0, 0, 414, 415, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 416, 0, 0, 461, 0, - 0, 0, 0, 0, 0, 0, 417, 418, 419, 420, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 421, 422, 423, - 424, 425, 297, 0, 0, 0, 0, 302, 303, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 426, 427, 428, - 429, 430, 431, 432, 433, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 318, 2, 3, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 0, 0, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, - 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, - 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, - 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, - 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, - 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, - 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, - 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, - 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, - 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, - 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, - 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, - 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, - 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, - 287, 288, 289, 290, 0, 0, 414, 415, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 416, 0, 0, 0, 520, - 0, 0, 0, 0, 0, 0, 417, 418, 419, 420, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 421, 422, 423, - 424, 425, 297, 0, 0, 0, 0, 302, 303, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 426, 427, 428, - 429, 430, 431, 432, 433, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 318, 2, 3, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 0, 0, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, - 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, - 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, - 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, - 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, - 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, - 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, - 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, - 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, - 
237, 238, 239, 240, 241, 242, 243, 244, 245, 246, - 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, - 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, - 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, - 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, - 287, 288, 289, 290, 0, 0, 414, 415, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 416, 0, 0, 576, 0, - 0, 0, 0, 0, 0, 0, 417, 418, 419, 420, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 421, 422, 423, - 424, 425, 297, 0, 0, 0, 0, 302, 303, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 426, 427, 428, - 429, 430, 431, 432, 433, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 318, 2, 3, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 0, 0, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, - 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, - 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, - 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, - 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, - 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, - 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, - 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, - 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, - 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, - 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, - 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, - 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, - 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, - 287, 288, 289, 290, 0, 0, 414, 415, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 416, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 588, 417, 418, 419, 420, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 421, 422, 423, - 424, 425, 297, 0, 0, 0, 0, 302, 303, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 426, 427, 428, - 429, 430, 431, 432, 433, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 318, 2, 3, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 0, 0, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, - 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, - 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, - 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, - 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, - 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, - 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, - 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, - 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, - 237, 238, 239, 
240, 241, 242, 243, 244, 245, 246, - 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, - 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, - 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, - 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, - 287, 288, 289, 290, 0, 0, 414, 415, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 416, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 417, 418, 419, 420, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 421, 422, 423, - 424, 425, 297, 0, 0, 0, 0, 302, 303, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 426, 427, 428, - 429, 430, 431, 432, 433, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 318, 2, 3, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 0, 0, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, - 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, - 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, - 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, - 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, - 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, - 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, - 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, - 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, - 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, - 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, - 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, - 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, - 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, - 287, 288, 289, 290, 0, 0, 414, 415, 0, 444, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 462, 0, 416, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 417, 418, 419, 420, - 528, 529, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 421, 422, 423, - 424, 425, 297, 0, 0, 0, 0, 302, 538, 0, - 0, 541, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 462, 0, 0, 0, 0, 426, 427, 428, - 429, 430, 431, 432, 433, 0, 0, 0, 0, 0, - 0, 462, 0, 0, 318, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 625, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 631, 632, 633, 462, 462, 462, 462, 462, 462, - 462, 462, 462, 462, 462, 462, 462, 462, 462, 462, + 0, 0, 640, 641, 642, 471, 471, 471, 471, 471, + 471, 471, 471, 471, 471, 471, 471, 471, 471, 471, + 471, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -2333,166 +2368,127 @@ static const yytype_int16 yytable[] = 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 462 + 471 }; static const yytype_int16 yycheck[] = { - 0, 321, 0, 455, 0, 358, 315, 314, 361, 0, - 363, 364, 321, 333, 367, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, - 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, 524, 314, 63, 64, 65, 66, 67, 
68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, - 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, - 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, - 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, - 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, - 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, - 289, 290, 291, 292, 341, 349, 416, 378, 506, 518, - 408, 520, 397, 384, 523, 580, 453, 656, 297, 298, - 302, 324, 350, 316, 318, 293, 294, 314, 350, 316, - 374, 324, 316, 370, 405, 406, 323, 299, 300, 316, - 324, 341, 315, 371, 442, 684, 323, 350, 321, 349, - 341, 315, 389, 332, 333, 337, 350, 357, 349, 357, - 360, 357, 351, 295, 296, 316, 357, 356, 357, 695, - 370, 456, 323, 317, 374, 315, 702, 321, 488, 370, - 490, 321, 314, 374, 316, 316, 584, 713, 320, 389, - 475, 318, 323, 315, 321, 315, 315, 324, 389, 321, - 400, 321, 321, 321, 315, 614, 324, 451, 321, 400, - 321, 324, 321, 550, 321, 324, 453, 324, 455, 324, - 518, 321, 520, 533, 324, 523, 304, 305, 306, 307, - 308, 309, 310, 311, 312, 313, 339, 340, 341, 537, - 562, 563, 564, 565, 321, 323, 321, 324, 329, 330, - 331, 451, 717, 453, 326, 455, 328, 655, 667, 319, - 451, 321, 453, 332, 455, 321, 322, 321, 322, 558, - 559, 318, 560, 561, 545, 318, 586, 350, 316, 350, - 590, 618, 566, 567, 350, 324, 323, 524, 350, 315, - 314, 316, 350, 350, 336, 335, 301, 303, 707, 334, - 317, 316, 314, 319, 324, 324, 506, 605, 606, 322, - 314, 314, 314, 550, 324, 506, 614, 324, 314, 314, - 350, 350, 315, 317, 524, 723, 317, 350, 314, 321, - 358, 319, 317, 524, 315, 314, 350, 315, 318, 323, - 650, 324, 315, 318, 362, 319, 656, 318, 324, 568, - 550, 569, 572, 570, 673, 571, 458, 296, 573, 550, - 628, 370, 658, 374, 544, 701, 374, 713, 368, 667, - 714, 658, 684, 516, 684, 516, 675, 516, -1, -1, - 580, 618, -1, -1, 584, 357, 696, -1, 673, 580, - -1, 689, -1, 584, -1, -1, -1, -1, -1, -1, - -1, -1, 712, -1, -1, -1, -1, -1, -1, 707, - -1, -1, -1, -1, -1, -1, -1, -1, 618, 656, - -1, -1, -1, -1, -1, -1, -1, 618, -1, -1, - -1, -1, -1, -1, -1, 679, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 684, -1, -1, - -1, -1, -1, -1, -1, 655, 656, -1, 658, -1, - -1, -1, -1, -1, 655, 656, -1, 658, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, 679, - -1, -1, -1, -1, 684, -1, -1, -1, 679, -1, - -1, -1, -1, 684, -1, 695, -1, -1, -1, -1, - -1, -1, 702, -1, 695, -1, -1, -1, -1, -1, - -1, 702, -1, 713, -1, -1, -1, 717, -1, -1, - -1, -1, 713, 723, -1, -1, 717, -1, -1, -1, - 0, -1, 723, 3, 4, 5, 6, 7, 8, 9, - 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, - 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, - 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, - 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, - 70, 71, 72, 73, 74, 75, 76, 
77, 78, 79, - 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, - 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, - 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, - 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, - 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, - 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, - 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, - 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, - 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, - 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, - 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, - 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, - 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, - 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, - 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, - 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, - 290, 291, 292, -1, -1, -1, -1, -1, -1, -1, + 0, 324, 0, 317, 0, 319, 319, 319, 464, 0, + 319, 319, 326, 336, 327, 327, 317, 326, 326, 317, + 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, + 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, + 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, + 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, + 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, + 54, 55, 56, 57, 58, 59, 60, 533, 327, 63, + 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, + 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, + 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, + 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, + 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, + 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, + 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, + 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, + 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, + 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, + 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, + 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, + 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, + 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, + 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, + 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, + 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, + 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, + 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, + 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, + 294, 357, 349, 416, 425, 589, 527, 515, 529, 405, + 321, 532, 462, 386, 353, 299, 300, 304, 665, 392, + 295, 296, 319, 297, 298, 318, 382, 361, 327, 326, + 364, 378, 366, 367, 324, 374, 370, 327, 451, 349, + 413, 414, 353, 317, 324, 319, 693, 357, 349, 323, + 397, 335, 336, 340, 353, 365, 357, 365, 368, 365, + 354, 318, 301, 302, 365, 359, 360, 324, 378, 465, + 320, 353, 382, 704, 324, 335, 497, 378, 499, 324, + 711, 382, 327, 321, 318, 593, 324, 397, 484, 327, + 324, 722, 321, 318, 318, 318, 397, 318, 408, 324, + 324, 324, 623, 324, 460, 318, 322, 408, 324, 559, + 324, 324, 353, 327, 527, 462, 529, 464, 329, 532, + 331, 542, 321, 306, 307, 308, 309, 310, 311, 312, + 313, 314, 315, 546, 571, 572, 573, 574, 324, 324, + 325, 327, 726, 326, 324, 324, 353, 327, 327, 353, + 460, 319, 462, 327, 464, 676, 664, 324, 325, 460, + 326, 462, 353, 464, 342, 343, 344, 332, 333, 334, + 567, 568, 575, 576, 595, 569, 570, 
627, 599, 318, + 317, 554, 353, 319, 353, 339, 533, 338, 337, 303, + 305, 320, 319, 317, 322, 716, 327, 327, 317, 317, + 317, 614, 615, 325, 327, 515, 327, 317, 317, 353, + 623, 353, 559, 318, 515, 353, 320, 320, 324, 317, + 361, 320, 322, 533, 732, 318, 317, 353, 318, 321, + 326, 318, 533, 327, 577, 321, 321, 578, 659, 327, + 365, 322, 579, 581, 665, 580, 582, 682, 382, 559, + 382, 298, 467, 553, 637, 667, 710, 722, 559, 723, + 667, 525, 378, 676, 376, 693, 525, 525, 365, 684, + -1, -1, 693, -1, -1, -1, -1, -1, -1, 589, + 627, -1, -1, 593, 705, 698, 682, -1, 589, -1, + -1, -1, 593, -1, -1, -1, -1, -1, -1, -1, + 721, -1, -1, 716, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, 627, 665, -1, + -1, -1, -1, -1, -1, -1, 627, -1, -1, -1, + -1, -1, 688, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, 693, -1, -1, -1, + -1, -1, -1, -1, 664, 665, -1, 667, -1, -1, + -1, -1, -1, 664, 665, -1, 667, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, 688, -1, + -1, -1, -1, 693, -1, -1, -1, 688, -1, -1, + -1, -1, 693, -1, 704, -1, -1, -1, -1, -1, + -1, 711, -1, 704, -1, -1, -1, -1, -1, -1, + 711, -1, 722, -1, -1, -1, 726, -1, -1, -1, + -1, 722, 732, -1, -1, 726, -1, -1, -1, 0, + -1, 732, 3, 4, 5, 6, 7, 8, 9, 10, + 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, + 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, + 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, + 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, + 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, + 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, + 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, + 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, + 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, + 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, + 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, + 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, + 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, + 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, + 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, + 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, + 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, + 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, + 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, + 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, + 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, + 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, + 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, + 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, + 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, + 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, + 291, 292, 293, 294, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 324, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 338, 339, - 340, 341, 342, -1, -1, -1, -1, -1, -1, -1, - -1, 351, 352, 353, 354, 355, 356, 357, -1, -1, + -1, -1, -1, -1, -1, -1, 327, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 370, 371, 372, 373, 374, 375, -1, -1, -1, -1, - -1, -1, -1, -1, 384, -1, 386, 387, 388, 389, - 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, - 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, - 410, 3, 4, 5, 6, 7, 8, 9, 10, 11, - 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, - 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, - 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, - 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, - 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, - 
92, 93, 94, 95, 96, 97, 98, 99, 100, 101, - 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, - 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, - 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, - 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, - 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, - 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, - 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, - 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, - 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, - 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, - 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, - 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, - 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, - 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, - 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, - 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, - 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, - 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, - 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, - 292, -1, -1, 295, 296, -1, -1, -1, -1, -1, + 341, 342, 343, 344, 345, -1, -1, -1, -1, -1, + -1, -1, -1, 354, 355, 356, 357, 358, 359, 360, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 314, -1, 316, -1, 318, 319, -1, -1, - -1, -1, 324, 325, 326, 327, 328, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 338, 339, 340, 341, - 342, -1, -1, -1, 346, 347, 348, 349, 350, 351, - 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, - -1, 363, 364, 365, 366, 367, 368, 369, 370, 371, - 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, - 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, - 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, - 402, 403, 404, 405, 406, 407, 408, 409, 410, 3, + -1, -1, 373, 374, 375, 376, 377, 378, -1, -1, + -1, -1, -1, -1, -1, -1, 387, -1, 389, 390, + 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, + 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, + 411, 412, 413, 414, 415, 416, 417, 418, 419, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, @@ -2521,101 +2517,62 @@ static const yytype_int16 yycheck[] = 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, - 284, 285, 286, 287, 288, 289, 290, 291, 292, -1, - -1, 295, 296, -1, -1, -1, -1, -1, -1, -1, + 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, + 294, -1, -1, 297, 298, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 314, -1, 316, -1, 318, 319, -1, -1, -1, -1, - 324, 325, 326, 327, 328, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 338, 339, 340, 341, 342, -1, - -1, -1, 346, 347, 348, 349, 350, 351, 352, 353, - 354, 355, 356, 357, 358, 359, 360, 361, -1, 363, - 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, + -1, -1, 316, 317, -1, 319, -1, 321, 322, -1, + -1, -1, -1, 327, 328, 329, 330, 331, -1, -1, + -1, -1, -1, -1, -1, -1, -1, 341, 342, 343, + 344, 345, -1, -1, -1, 349, 350, 351, 352, 353, + 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, + 364, -1, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, - 404, 405, 406, 407, 408, 409, 410, 3, 4, 5, - 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, - 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, - 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, - 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, - 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, - 66, 67, 
68, 69, 70, 71, 72, 73, 74, 75, - 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, - 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, - 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, - 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, - 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, - 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, - 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, - 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, - 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, - 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, - 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, - 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, - 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, - 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, - 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, - 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, - 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, - 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, - 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, - 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, - 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, - 286, 287, 288, 289, 290, 291, 292, -1, -1, 295, - 296, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 314, -1, - 316, -1, 318, -1, -1, -1, -1, -1, 324, 325, - 326, 327, 328, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 338, 339, 340, 341, 342, -1, -1, -1, - 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, - 356, 357, 358, 359, 360, 361, -1, 363, 364, 365, - 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, - 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, - 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, - 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, - 406, 407, 408, 409, 410, 3, 4, 5, 6, 7, - 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, - 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, - 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, - 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, - 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, - 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, - 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, - 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, - 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, - 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, - 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, - 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, - 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, - 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, - 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, - 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, - 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, - 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, - 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, - 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, - 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, - 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, - 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, - 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, - 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, - 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, - 288, 289, 290, 291, 292, -1, -1, 295, 296, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 314, -1, 316, -1, - 318, -1, -1, -1, -1, -1, 324, 325, 326, 327, - 328, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 338, 339, 340, 341, 342, -1, -1, -1, 346, 347, - 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, - 358, 359, 360, 361, -1, 363, 364, 365, 366, 367, - 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, - 378, 379, 380, 381, 382, 383, 384, 
385, 386, 387, - 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, - 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, - 408, 409, 410, 3, 4, 5, 6, 7, 8, 9, + 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, + 414, 415, 416, 417, 418, 419, 3, 4, 5, 6, + 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, + 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, + 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, + 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, + 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, + 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, + 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, + 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, + 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, + 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, + 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, + 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, + 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, + 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, + 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, + 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, + 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, + 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, + 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, + 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, + 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, + 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, + 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, + 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, + 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, + 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, + 287, 288, 289, 290, 291, 292, 293, 294, -1, -1, + 297, 298, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, 316, + 317, -1, 319, -1, 321, 322, -1, -1, -1, -1, + 327, 328, 329, 330, 331, -1, -1, -1, -1, -1, + -1, -1, -1, -1, 341, 342, 343, 344, 345, -1, + -1, -1, 349, 350, 351, 352, 353, 354, 355, 356, + 357, 358, 359, 360, 361, 362, 363, 364, -1, 366, + 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, + 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, + 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, + 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, + 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, + 417, 418, 419, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, @@ -2644,100 +2601,61 @@ static const yytype_int16 yycheck[] = 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, - 290, 291, 292, -1, -1, 295, 296, -1, -1, -1, + 290, 291, 292, 293, 294, -1, -1, 297, 298, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 314, -1, 316, -1, -1, -1, - -1, -1, -1, -1, 324, 325, 326, 327, 328, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 338, 339, - 340, 341, 342, -1, -1, -1, 346, 347, 348, 349, + -1, -1, -1, -1, -1, -1, 316, 317, -1, 319, + -1, 321, -1, -1, -1, -1, -1, 327, 328, 329, + 330, 331, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 341, 342, 343, 344, 345, -1, -1, -1, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, - 360, 361, -1, 363, 364, 365, 366, 367, 368, 369, + 360, 361, 362, 363, 364, -1, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, - 410, 3, 4, 5, 6, 7, 8, 9, 10, 11, - 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, - 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, - 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, - 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, - 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, - 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, - 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, - 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, - 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, - 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, - 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, - 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, - 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, - 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, - 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, - 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, - 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, - 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, - 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, - 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, - 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, - 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, - 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, - 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, - 292, -1, -1, 295, 296, -1, -1, -1, -1, -1, + 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, + 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, + 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, + 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, + 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, + 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, + 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, + 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, + 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, + 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, + 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, + 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, + 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, + 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, + 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, + 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, + 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, + 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, + 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, + 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, + 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, + 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, + 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, + 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, + 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, + 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, + 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, + 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, + 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, + 293, 294, -1, -1, 297, 298, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 314, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 324, 325, 326, 327, 328, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 338, 339, 340, 341, - 342, -1, -1, -1, 346, 347, 348, 349, 350, 351, - 352, 353, 354, 355, 356, 357, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 370, 371, - 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, - 382, 383, 384, -1, 386, 387, 388, 389, 390, 391, - 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, - 402, 403, 404, 405, 406, 407, 408, 409, 410, 3, - 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, - 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, - 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, - 34, 35, 36, 37, 38, 39, 40, 41, 42, 
43, - 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, - 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, - 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, - 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, - 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, - 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, - 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, - 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, - 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, - 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, - 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, - 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, - 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, - 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, - 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, - 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, - 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, - 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, - 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, - 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, - 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, - 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, - 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, - 284, 285, 286, 287, 288, 289, 290, 291, 292, -1, - -1, 295, 296, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 314, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, 325, 326, 327, 328, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 338, 339, 340, 341, -1, -1, - -1, -1, 346, 347, 348, 349, 350, 351, 352, 353, - 354, 355, 356, 357, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 370, 371, 372, 373, - 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, - 384, -1, 386, 387, 388, 389, 390, 391, 392, 393, - 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, - 404, 405, 406, 407, 408, 409, 410, 3, 4, 5, + -1, -1, -1, 316, 317, -1, 319, -1, 321, -1, + -1, -1, -1, -1, 327, 328, 329, 330, 331, -1, + -1, -1, -1, -1, -1, -1, -1, -1, 341, 342, + 343, 344, 345, -1, -1, -1, 349, 350, 351, 352, + 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, + 363, 364, -1, 366, 367, 368, 369, 370, 371, 372, + 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, + 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, + 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, + 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, + 413, 414, 415, 416, 417, 418, 419, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, @@ -2766,19 +2684,145 @@ static const yytype_int16 yycheck[] = 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, - 286, 287, 288, 289, 290, 291, 292, -1, -1, -1, + 286, 287, 288, 289, 290, 291, 292, 293, 294, -1, + -1, 297, 298, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 324, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 338, 339, 340, 341, 342, -1, -1, -1, - -1, -1, -1, -1, -1, 351, 352, 353, 354, 355, - 356, 357, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 370, 371, 372, 373, 374, 375, - -1, -1, -1, -1, -1, -1, -1, -1, 384, -1, + 316, 317, -1, 319, -1, -1, -1, -1, -1, -1, + -1, 327, 328, 329, 330, 331, -1, -1, -1, -1, + -1, -1, -1, -1, -1, 341, 342, 343, 344, 345, + -1, -1, -1, 349, 350, 351, 352, 353, 354, 355, + 356, 357, 358, 359, 360, 361, 362, 363, 364, -1, + 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, + 376, 377, 378, 379, 380, 381, 382, 
383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, - 406, 407, 408, 409, 410, 3, 4, 5, 6, 7, + 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, + 416, 417, 418, 419, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, + 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, + 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, + 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, + 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, + 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, + 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, + 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, + 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, + 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, + 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, + 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, + 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, + 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, + 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, + 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, + 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, + 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, + 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, + 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, + 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, + 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, + 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, + 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, + 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, + 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, + 289, 290, 291, 292, 293, 294, -1, -1, 297, 298, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, 316, 317, -1, + -1, -1, -1, -1, -1, -1, -1, -1, 327, 328, + 329, 330, 331, -1, -1, -1, -1, -1, -1, -1, + -1, -1, 341, 342, 343, 344, 345, -1, -1, -1, + 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, + 359, 360, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, 373, 374, 375, 376, 377, 378, + 379, 380, 381, 382, 383, 384, 385, 386, 387, -1, + 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, + 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, + 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, + 419, 3, 4, 5, 6, 7, 8, 9, 10, 11, + 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, + 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, + 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, + 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, + 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, + 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, + 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, + 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, + 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, + 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, + 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, + 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, + 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, + 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, + 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, + 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, + 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, + 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, + 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, + 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, + 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, + 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, + 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, + 272, 273, 274, 
275, 276, 277, 278, 279, 280, 281, + 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, + 292, 293, 294, -1, -1, 297, 298, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, 316, 317, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, 328, 329, 330, 331, + -1, -1, -1, -1, -1, -1, -1, -1, -1, 341, + 342, 343, 344, -1, -1, -1, -1, 349, 350, 351, + 352, 353, 354, 355, 356, 357, 358, 359, 360, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 373, 374, 375, 376, 377, 378, 379, 380, 381, + 382, 383, 384, 385, 386, 387, -1, 389, 390, 391, + 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, + 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, + 412, 413, 414, 415, 416, 417, 418, 419, 3, 4, + 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, + 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, + 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, + 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, + 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, + 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, + 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, + 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, + 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, + 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, + 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, + 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, + 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, + 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, + 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, + 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, + 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, + 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, + 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, + 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, + 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, + 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, + 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, + 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, + 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, + 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, + 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, + 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, 327, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, 341, 342, 343, 344, + 345, -1, -1, -1, -1, -1, -1, -1, -1, 354, + 355, 356, 357, 358, 359, 360, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, 373, 374, + 375, 376, 377, 378, -1, -1, -1, -1, -1, -1, + -1, -1, 387, -1, 389, 390, 391, 392, 393, 394, + 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, + 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, + 415, 416, 417, 418, 419, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, @@ -2807,100 +2851,61 @@ static const yytype_int16 yycheck[] = 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, - 288, 289, 290, 291, 292, -1, -1, -1, -1, -1, + 288, 289, 290, 291, 292, 293, 294, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 324, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, 327, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 338, 339, 340, 341, -1, -1, -1, -1, -1, -1, - -1, -1, 350, 351, 352, 353, 354, 355, 356, 357, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 370, 371, 372, 373, 374, 375, -1, -1, - -1, -1, -1, -1, -1, -1, 384, 
-1, 386, 387, - 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, + -1, -1, -1, 341, 342, 343, 344, -1, -1, -1, + -1, -1, -1, -1, -1, 353, 354, 355, 356, 357, + 358, 359, 360, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, 373, 374, 375, 376, 377, + 378, -1, -1, -1, -1, -1, -1, -1, -1, 387, + -1, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, - 408, 409, 410, 3, 4, 5, 6, 7, 8, 9, - 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, - 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, - 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, - 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, - 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, - 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, - 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, - 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, - 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, - 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, - 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, - 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, - 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, - 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, - 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, - 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, - 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, - 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, - 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, - 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, - 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, - 290, 291, 292, -1, -1, -1, -1, -1, -1, -1, + 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, + 418, 419, 3, 4, 5, 6, 7, 8, 9, 10, + 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, + 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, + 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, + 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, + 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, + 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, + 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, + 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, + 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, + 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, + 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, + 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, + 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, + 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, + 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, + 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, + 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, + 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, + 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, + 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, + 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, + 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, + 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, + 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, + 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, + 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, + 291, 292, 293, 294, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, 319, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 338, 339, - 340, 341, -1, -1, -1, -1, -1, -1, -1, 
-1, - -1, 351, 352, 353, 354, 355, 356, 357, -1, -1, + -1, 322, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 370, 371, 372, 373, 374, 375, -1, -1, -1, -1, - -1, -1, -1, -1, 384, -1, 386, 387, 388, 389, - 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, - 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, - 410, 3, 4, 5, 6, 7, 8, 9, 10, 11, - 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, - 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, - 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, - 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, - 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, - 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, - 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, - 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, - 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, - 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, - 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, - 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, - 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, - 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, - 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, - 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, - 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, - 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, - 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, - 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, - 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, - 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, - 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, - 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, - 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, - 292, -1, -1, -1, -1, -1, -1, -1, -1, -1, + 341, 342, 343, 344, -1, -1, -1, -1, -1, -1, + -1, -1, -1, 354, 355, 356, 357, 358, 359, 360, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 319, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 338, 339, 340, 341, - -1, -1, -1, -1, -1, -1, -1, -1, -1, 351, - 352, 353, 354, 355, 356, 357, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 370, 371, - 372, 373, 374, 375, -1, -1, -1, -1, -1, -1, - -1, -1, 384, -1, 386, 387, 388, 389, 390, 391, - 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, - 402, 403, 404, 405, 406, 407, 408, 409, 410, 3, + -1, -1, 373, 374, 375, 376, 377, 378, -1, -1, + -1, -1, -1, -1, -1, -1, 387, -1, 389, 390, + 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, + 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, + 411, 412, 413, 414, 415, 416, 417, 418, 419, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, @@ -2929,25 +2934,149 @@ static const yytype_int16 yycheck[] = 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, - 284, 285, 286, 287, 288, 289, 290, 291, 292, -1, + 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, + 294, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, 322, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, 319, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 338, 339, 340, 341, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 351, 352, 353, - 354, 355, 356, 357, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 370, 371, 372, 373, - 374, 375, -1, -1, -1, -1, -1, -1, -1, -1, - 384, -1, 386, 
387, 388, 389, 390, 391, 392, 393, + -1, -1, -1, -1, -1, -1, -1, 341, 342, 343, + 344, -1, -1, -1, -1, -1, -1, -1, -1, -1, + 354, 355, 356, 357, 358, 359, 360, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, 373, + 374, 375, 376, 377, 378, -1, -1, -1, -1, -1, + -1, -1, -1, 387, -1, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, - 404, 405, 406, 407, 408, 409, 410, 3, 4, 5, + 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, + 414, 415, 416, 417, 418, 419, 3, 4, 5, 6, + 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, + 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, + 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, + 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, + 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, + 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, + 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, + 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, + 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, + 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, + 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, + 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, + 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, + 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, + 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, + 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, + 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, + 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, + 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, + 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, + 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, + 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, + 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, + 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, + 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, + 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, + 287, 288, 289, 290, 291, 292, 293, 294, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, 322, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, 341, 342, 343, 344, -1, -1, + -1, -1, -1, -1, -1, -1, -1, 354, 355, 356, + 357, 358, 359, 360, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, 373, 374, 375, 376, + 377, 378, -1, -1, -1, -1, -1, -1, -1, -1, + 387, -1, 389, 390, 391, 392, 393, 394, 395, 396, + 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, + 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, + 417, 418, 419, 3, 4, 5, 6, 7, 8, 9, + 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, + 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, + 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, + 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, + 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, + 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, + 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, + 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, + 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, + 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, + 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, + 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, + 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, + 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, + 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, + 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, + 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, + 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, + 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, + 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, + 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 
+ 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, + 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, + 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, + 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, + 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, + 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, + 290, 291, 292, 293, 294, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 341, 342, 343, 344, -1, -1, -1, -1, -1, + -1, -1, -1, -1, 354, 355, 356, 357, 358, 359, + 360, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, 373, 374, 375, 376, 377, 378, -1, + -1, -1, -1, -1, -1, -1, -1, 387, -1, 389, + 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, + 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, + 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, + 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, + 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, + 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, + 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, + 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, + 54, 55, 56, 57, 58, 59, 60, -1, -1, 63, + 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, + 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, + 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, + 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, + 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, + 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, + 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, + 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, + 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, + 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, + 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, + 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, + 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, + 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, + 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, + 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, + 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, + 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, + 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, + 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, + 294, -1, -1, 297, 298, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, 316, 317, -1, -1, -1, 321, 322, -1, + -1, -1, -1, -1, 328, 329, 330, 331, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, 349, 350, 351, 352, 353, + 354, -1, -1, -1, -1, 359, 360, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, 379, 380, 381, 382, 383, + 384, 385, 386, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, 401, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, - 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, + 56, 57, 58, 59, 60, -1, -1, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, @@ -2970,301 +3099,226 @@ static const yytype_int16 yycheck[] = 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, - 286, 287, 288, 289, 290, 291, 292, -1, -1, -1, + 286, 287, 288, 289, 290, 291, 292, 293, 294, -1, + -1, 297, 298, -1, -1, -1, -1, 
-1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + 316, 317, -1, -1, 320, -1, -1, -1, -1, -1, + -1, -1, 328, 329, 330, 331, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, 349, 350, 351, 352, 353, 354, -1, + -1, -1, -1, 359, 360, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, 379, 380, 381, 382, 383, 384, 385, + 386, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, 401, 4, 5, 6, 7, + 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, + 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, + 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, + 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, + 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, + 58, 59, 60, -1, -1, 63, 64, 65, 66, 67, + 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, + 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, + 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, + 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, + 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, + 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, + 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, + 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, + 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, + 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, + 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, + 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, + 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, + 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, + 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, + 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, + 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, + 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, + 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, + 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, + 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, + 288, 289, 290, 291, 292, 293, 294, -1, -1, 297, + 298, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, 316, 317, + -1, -1, -1, 321, -1, -1, -1, -1, -1, -1, + 328, 329, 330, 331, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 349, 350, 351, 352, 353, 354, -1, -1, -1, + -1, 359, 360, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 379, 380, 381, 382, 383, 384, 385, 386, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, 401, 4, 5, 6, 7, 8, 9, + 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, + 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, + 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, + 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, + 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, + 60, -1, -1, 63, 64, 65, 66, 67, 68, 69, + 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, + 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, + 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, + 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, + 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, + 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, + 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, + 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, + 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, + 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, + 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, + 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, + 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, + 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, + 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, + 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, + 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, + 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 
+ 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, + 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, + 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, + 290, 291, 292, 293, 294, -1, -1, 297, 298, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, 316, 317, -1, -1, + 320, -1, -1, -1, -1, -1, -1, -1, 328, 329, + 330, 331, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, 349, + 350, 351, 352, 353, 354, -1, -1, -1, -1, 359, + 360, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, 379, + 380, 381, 382, 383, 384, 385, 386, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 401, 4, 5, 6, 7, 8, 9, 10, 11, + 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, + 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, + 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, + 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, + 52, 53, 54, 55, 56, 57, 58, 59, 60, -1, + -1, 63, 64, 65, 66, 67, 68, 69, 70, 71, + 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, + 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, + 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, + 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, + 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, + 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, + 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, + 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, + 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, + 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, + 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, + 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, + 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, + 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, + 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, + 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, + 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, + 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, + 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, + 292, 293, 294, -1, -1, 297, 298, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, 316, 317, -1, -1, -1, -1, + -1, -1, -1, -1, -1, 327, 328, 329, 330, 331, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, 349, 350, 351, + 352, 353, 354, -1, -1, -1, -1, 359, 360, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, 379, 380, 381, + 382, 383, 384, 385, 386, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, 401, + 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, + 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, + 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, + 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, + 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, + 54, 55, 56, 57, 58, 59, 60, -1, -1, 63, + 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, + 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, + 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, + 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, + 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, + 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, + 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, + 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, + 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, + 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, + 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, + 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, + 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, + 214, 215, 216, 
217, 218, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, + 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, + 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, + 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, + 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, + 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, + 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, + 294, -1, -1, 297, 298, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, 316, 317, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, 328, 329, 330, 331, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, 349, 350, 351, 352, 353, + 354, -1, -1, -1, -1, 359, 360, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, 379, 380, 381, 382, 383, + 384, 385, 386, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, 401, 4, 5, + 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, + 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, + 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, + 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, + 56, 57, 58, 59, 60, -1, -1, 63, 64, 65, + 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, + 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, + 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, + 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, + 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, + 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, + 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, + 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, + 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, + 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, + 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, + 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, + 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, + 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, + 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, + 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, + 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, + 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, + 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, + 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, + 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, + 286, 287, 288, 289, 290, 291, 292, 293, 294, 391, + -1, 297, 298, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, 405, -1, -1, -1, -1, -1, -1, + 316, 317, -1, -1, -1, -1, -1, -1, -1, -1, + 422, 423, 328, 329, 330, 331, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, 349, 350, 351, 352, 353, 354, -1, + -1, -1, 454, 359, 360, -1, -1, -1, -1, -1, + -1, -1, -1, 465, -1, -1, -1, -1, -1, -1, + -1, -1, -1, 379, 380, 381, 382, 383, 384, 385, + 386, -1, 484, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, 401, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 338, 339, 340, 341, -1, -1, -1, -1, - -1, -1, -1, -1, -1, 351, 352, 353, 354, 355, - 356, 357, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 370, 371, 372, 373, 374, 375, - -1, -1, -1, -1, -1, -1, -1, -1, 384, -1, - 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, - 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, - 406, 407, 408, 409, 410, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, - 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, -1, -1, 63, 
64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, - 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, - 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, - 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, - 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, - 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, - 289, 290, 291, 292, -1, -1, 295, 296, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, 551, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, 314, -1, -1, -1, 318, - 319, -1, -1, -1, -1, -1, 325, 326, 327, 328, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 346, 347, 348, - 349, 350, 351, -1, -1, -1, -1, 356, 357, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 376, 377, 378, - 379, 380, 381, 382, 383, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 393, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, - 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, -1, -1, 63, 64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, - 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, - 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, - 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, - 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, - 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, - 289, 290, 291, 292, -1, -1, 295, 296, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, 314, -1, -1, 317, -1, - -1, -1, -1, -1, -1, -1, 325, 326, 327, 328, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 346, 347, 348, - 349, 350, 351, -1, -1, -1, -1, 356, 357, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 376, 377, 378, - 379, 380, 381, 382, 383, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 393, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, - 
19, 20, 21, 22, 23, 24, 25, 26, 27, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, -1, -1, 63, 64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, - 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, - 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, - 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, - 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, - 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, - 289, 290, 291, 292, -1, -1, 295, 296, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, 314, -1, -1, -1, 318, - -1, -1, -1, -1, -1, -1, 325, 326, 327, 328, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 346, 347, 348, - 349, 350, 351, -1, -1, -1, -1, 356, 357, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 376, 377, 378, - 379, 380, 381, 382, 383, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 393, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, - 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, -1, -1, 63, 64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, - 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, - 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, - 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, - 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, - 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, - 289, 290, 291, 292, -1, -1, 295, 296, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, 314, -1, -1, 317, -1, - -1, -1, -1, -1, -1, -1, 325, 326, 327, 328, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 346, 347, 348, - 349, 350, 351, -1, -1, -1, -1, 356, 357, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 
-1, 376, 377, 378, - 379, 380, 381, 382, 383, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 393, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, - 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, -1, -1, 63, 64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, - 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, - 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, - 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, - 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, - 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, - 289, 290, 291, 292, -1, -1, 295, 296, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, 314, -1, -1, -1, -1, - -1, -1, -1, -1, -1, 324, 325, 326, 327, 328, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 346, 347, 348, - 349, 350, 351, -1, -1, -1, -1, 356, 357, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 376, 377, 378, - 379, 380, 381, 382, 383, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 393, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, - 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, -1, -1, 63, 64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, - 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, - 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, - 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, - 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, - 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, - 289, 290, 291, 292, -1, -1, 295, 296, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, 314, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 325, 326, 327, 328, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, 
-1, -1, -1, -1, 346, 347, 348, - 349, 350, 351, -1, -1, -1, -1, 356, 357, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 376, 377, 378, - 379, 380, 381, 382, 383, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 393, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, - 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, -1, -1, 63, 64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, - 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, - 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, - 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, - 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, - 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, - 289, 290, 291, 292, -1, -1, 295, 296, -1, 383, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, 397, -1, 314, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 325, 326, 327, 328, - 414, 415, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 346, 347, 348, - 349, 350, 351, -1, -1, -1, -1, 356, 357, -1, - -1, 445, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 456, -1, -1, -1, -1, 376, 377, 378, - 379, 380, 381, 382, 383, -1, -1, -1, -1, -1, - -1, 475, -1, -1, 393, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 542, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, 555, 556, 557, 558, 559, 560, 561, 562, 563, - 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, + -1, -1, 564, 565, 566, 567, 568, 569, 570, 571, + 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, + 582, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, @@ -3274,7 +3328,7 @@ static const yytype_int16 yycheck[] = -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, 673 + 682 }; /* YYSTOS[STATE-NUM] -- The (internal number of the) accessing @@ -3310,139 +3364,142 @@ static const yytype_uint16 yystos[] = 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, - 292, 324, 338, 339, 340, 341, 342, 351, 352, 353, - 354, 355, 356, 357, 370, 371, 372, 373, 374, 375, - 384, 386, 387, 388, 389, 390, 391, 392, 393, 394, - 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, - 405, 406, 407, 408, 409, 410, 442, 443, 446, 447, - 448, 449, 
453, 454, 455, 456, 457, 458, 461, 462, - 463, 464, 465, 467, 472, 473, 474, 515, 516, 517, - 473, 318, 350, 314, 314, 324, 350, 324, 518, 315, - 321, 450, 451, 452, 462, 467, 321, 324, 350, 324, - 350, 463, 467, 332, 469, 470, 0, 516, 467, 476, - 318, 350, 371, 459, 460, 350, 466, 316, 324, 468, - 318, 494, 451, 450, 452, 350, 350, 314, 323, 468, - 318, 321, 324, 445, 295, 296, 314, 325, 326, 327, - 328, 346, 347, 348, 349, 350, 376, 377, 378, 379, - 380, 381, 382, 383, 412, 413, 414, 416, 417, 418, - 419, 420, 421, 422, 423, 424, 465, 467, 471, 468, - 324, 462, 467, 477, 478, 475, 323, 315, 321, 315, - 321, 317, 423, 425, 426, 427, 428, 429, 430, 431, - 432, 433, 434, 435, 436, 316, 324, 316, 318, 319, - 324, 358, 359, 360, 361, 363, 364, 365, 366, 367, - 368, 369, 385, 423, 436, 438, 440, 442, 446, 465, - 467, 483, 484, 485, 486, 487, 495, 496, 497, 498, - 501, 502, 505, 506, 507, 514, 519, 468, 323, 468, - 318, 438, 481, 323, 444, 350, 321, 324, 423, 423, - 440, 295, 296, 316, 320, 315, 315, 321, 357, 438, - 314, 423, 321, 333, 467, 350, 479, 480, 319, 478, - 477, 436, 441, 460, 350, 329, 330, 331, 326, 328, - 293, 294, 297, 298, 332, 333, 299, 300, 336, 335, - 334, 301, 303, 302, 337, 317, 317, 436, 316, 319, - 488, 314, 324, 324, 509, 314, 314, 324, 324, 440, - 314, 440, 322, 324, 304, 305, 306, 307, 308, 309, - 310, 311, 312, 313, 323, 439, 321, 324, 319, 484, - 498, 502, 507, 481, 323, 481, 482, 481, 477, 350, - 315, 415, 440, 350, 438, 423, 479, 468, 321, 324, - 319, 423, 423, 423, 425, 425, 426, 426, 427, 427, - 427, 427, 428, 428, 429, 430, 431, 432, 433, 434, - 437, 317, 350, 520, 521, 495, 508, 484, 510, 440, - 324, 440, 322, 438, 438, 481, 319, 321, 319, 317, - 324, 480, 440, 314, 317, 321, 489, 440, 455, 462, - 500, 358, 483, 496, 511, 315, 315, 319, 481, 322, - 441, 317, 521, 319, 350, 315, 314, 500, 512, 513, - 491, 492, 493, 499, 503, 438, 315, 323, 485, 490, - 494, 440, 324, 315, 362, 487, 485, 318, 481, 315, - 440, 490, 491, 495, 504, 324, 319 + 292, 293, 294, 327, 341, 342, 343, 344, 345, 354, + 355, 356, 357, 358, 359, 360, 373, 374, 375, 376, + 377, 378, 387, 389, 390, 391, 392, 393, 394, 395, + 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, + 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, + 416, 417, 418, 419, 451, 452, 455, 456, 457, 458, + 462, 463, 464, 465, 466, 467, 470, 471, 472, 473, + 474, 476, 481, 482, 483, 524, 525, 526, 482, 321, + 353, 317, 317, 327, 353, 327, 527, 318, 324, 459, + 460, 461, 471, 476, 324, 327, 353, 327, 353, 472, + 476, 335, 478, 479, 0, 525, 476, 485, 321, 353, + 374, 468, 469, 353, 475, 319, 327, 477, 321, 503, + 460, 459, 461, 353, 353, 317, 326, 477, 321, 324, + 327, 454, 297, 298, 316, 317, 328, 329, 330, 331, + 349, 350, 351, 352, 353, 379, 380, 381, 382, 383, + 384, 385, 386, 421, 422, 423, 425, 426, 427, 428, + 429, 430, 431, 432, 433, 474, 476, 480, 477, 327, + 471, 476, 486, 487, 484, 326, 318, 324, 318, 324, + 320, 432, 434, 435, 436, 437, 438, 439, 440, 441, + 442, 443, 444, 445, 319, 327, 319, 321, 322, 327, + 361, 362, 363, 364, 366, 367, 368, 369, 370, 371, + 372, 388, 432, 445, 447, 449, 451, 455, 474, 476, + 492, 493, 494, 495, 496, 504, 505, 506, 507, 510, + 511, 514, 515, 516, 523, 528, 477, 326, 477, 321, + 447, 490, 326, 453, 353, 324, 327, 432, 432, 449, + 297, 298, 319, 323, 318, 318, 324, 360, 447, 317, + 432, 324, 336, 476, 353, 488, 489, 322, 487, 486, + 445, 450, 469, 353, 332, 333, 334, 329, 331, 295, + 296, 299, 300, 335, 336, 301, 302, 339, 338, 337, + 303, 305, 304, 340, 320, 320, 445, 319, 322, 
497, + 317, 327, 327, 518, 317, 317, 327, 327, 449, 317, + 449, 325, 327, 306, 307, 308, 309, 310, 311, 312, + 313, 314, 315, 326, 448, 324, 327, 322, 493, 507, + 511, 516, 490, 326, 490, 491, 490, 486, 353, 318, + 424, 449, 353, 447, 432, 488, 477, 324, 327, 322, + 432, 432, 432, 434, 434, 435, 435, 436, 436, 436, + 436, 437, 437, 438, 439, 440, 441, 442, 443, 446, + 320, 353, 529, 530, 504, 517, 493, 519, 449, 327, + 449, 325, 447, 447, 490, 322, 324, 322, 320, 327, + 489, 449, 317, 320, 324, 498, 449, 464, 471, 509, + 361, 492, 505, 520, 318, 318, 322, 490, 325, 450, + 320, 530, 322, 353, 318, 317, 509, 521, 522, 500, + 501, 502, 508, 512, 447, 318, 326, 494, 499, 503, + 449, 327, 318, 365, 496, 494, 321, 490, 318, 449, + 499, 500, 504, 513, 327, 322 }; /* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */ static const yytype_uint16 yyr1[] = { - 0, 411, 412, 413, 413, 413, 413, 413, 413, 413, - 413, 413, 413, 413, 413, 413, 413, 414, 414, 414, - 414, 414, 414, 415, 416, 417, 418, 418, 419, 419, - 420, 420, 421, 422, 422, 422, 423, 423, 423, 423, - 424, 424, 424, 424, 425, 425, 425, 425, 426, 426, - 426, 427, 427, 427, 428, 428, 428, 428, 428, 429, - 429, 429, 430, 430, 431, 431, 432, 432, 433, 433, - 434, 434, 435, 435, 436, 437, 436, 438, 438, 439, - 439, 439, 439, 439, 439, 439, 439, 439, 439, 439, - 440, 440, 441, 442, 442, 442, 442, 442, 442, 442, - 442, 442, 444, 443, 445, 445, 446, 447, 447, 448, - 448, 449, 450, 450, 451, 451, 451, 451, 452, 453, - 453, 453, 453, 453, 454, 454, 454, 454, 454, 455, - 455, 456, 457, 457, 457, 457, 457, 457, 457, 457, - 458, 459, 459, 460, 460, 460, 461, 462, 462, 463, - 463, 463, 463, 463, 463, 463, 464, 464, 464, 464, - 464, 464, 464, 464, 464, 464, 464, 464, 464, 464, - 464, 464, 464, 464, 464, 464, 464, 464, 464, 464, - 464, 464, 464, 464, 464, 465, 466, 466, 467, 467, - 468, 468, 468, 468, 469, 469, 470, 471, 471, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 472, - 472, 472, 472, 472, 472, 472, 472, 472, 472, 473, - 473, 473, 475, 474, 476, 474, 477, 477, 478, 478, - 479, 479, 480, 480, 481, 481, 481, 482, 482, 483, - 484, 484, 485, 485, 485, 485, 
485, 485, 485, 485, - 486, 487, 488, 489, 487, 490, 490, 492, 491, 493, - 491, 494, 494, 495, 495, 496, 496, 497, 497, 498, - 499, 499, 500, 500, 501, 501, 503, 502, 504, 504, - 505, 505, 506, 506, 508, 507, 509, 507, 510, 507, - 511, 511, 512, 512, 513, 513, 514, 514, 514, 514, - 514, 515, 515, 516, 516, 516, 518, 517, 519, 520, - 520, 521, 521 + 0, 420, 421, 422, 422, 422, 422, 422, 422, 422, + 422, 422, 422, 422, 422, 422, 422, 422, 423, 423, + 423, 423, 423, 423, 424, 425, 426, 427, 427, 428, + 428, 429, 429, 430, 431, 431, 431, 432, 432, 432, + 432, 433, 433, 433, 433, 434, 434, 434, 434, 435, + 435, 435, 436, 436, 436, 437, 437, 437, 437, 437, + 438, 438, 438, 439, 439, 440, 440, 441, 441, 442, + 442, 443, 443, 444, 444, 445, 446, 445, 447, 447, + 448, 448, 448, 448, 448, 448, 448, 448, 448, 448, + 448, 449, 449, 450, 451, 451, 451, 451, 451, 451, + 451, 451, 451, 453, 452, 454, 454, 455, 456, 456, + 457, 457, 458, 459, 459, 460, 460, 460, 460, 461, + 462, 462, 462, 462, 462, 463, 463, 463, 463, 463, + 464, 464, 465, 466, 466, 466, 466, 466, 466, 466, + 466, 467, 468, 468, 469, 469, 469, 470, 471, 471, + 472, 472, 472, 472, 472, 472, 472, 473, 473, 473, + 473, 473, 473, 473, 473, 473, 473, 473, 473, 473, + 473, 473, 473, 473, 473, 473, 473, 473, 473, 473, + 473, 473, 473, 473, 473, 473, 473, 473, 473, 473, + 473, 473, 474, 475, 475, 476, 476, 477, 477, 477, + 477, 478, 478, 479, 480, 480, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 481, 481, + 481, 481, 481, 481, 481, 481, 481, 481, 482, 482, + 482, 484, 483, 485, 483, 486, 486, 487, 487, 488, + 488, 489, 489, 490, 490, 490, 491, 491, 492, 493, + 493, 494, 494, 494, 494, 494, 494, 494, 494, 495, + 496, 497, 498, 496, 499, 499, 501, 500, 502, 500, + 503, 503, 504, 504, 505, 505, 506, 506, 507, 508, + 508, 509, 509, 510, 510, 512, 511, 513, 513, 514, + 514, 515, 515, 517, 516, 518, 516, 519, 516, 520, + 520, 521, 521, 522, 522, 523, 523, 523, 523, 523, + 524, 524, 525, 525, 525, 527, 526, 528, 529, 529, + 530, 530 }; /* YYR2[YYN] -- Number of symbols on the right hand side of rule YYN. 
*/ static const yytype_uint8 yyr2[] = { 0, 2, 1, 1, 3, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 4, 1, - 3, 2, 2, 1, 1, 1, 2, 2, 2, 1, - 2, 3, 2, 1, 1, 1, 1, 2, 2, 2, - 1, 1, 1, 1, 1, 3, 3, 3, 1, 3, - 3, 1, 3, 3, 1, 3, 3, 3, 3, 1, - 3, 3, 1, 3, 1, 3, 1, 3, 1, 3, - 1, 3, 1, 3, 1, 0, 6, 1, 3, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 4, + 1, 3, 2, 2, 1, 1, 1, 2, 2, 2, + 1, 2, 3, 2, 1, 1, 1, 1, 2, 2, + 2, 1, 1, 1, 1, 1, 3, 3, 3, 1, + 3, 3, 1, 3, 3, 1, 3, 3, 3, 3, + 1, 3, 3, 1, 3, 1, 3, 1, 3, 1, + 3, 1, 3, 1, 3, 1, 0, 6, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 3, 1, 2, 2, 4, 2, 3, 4, 2, - 3, 4, 0, 6, 2, 3, 2, 1, 1, 2, - 3, 3, 2, 3, 2, 1, 2, 1, 1, 1, - 3, 4, 6, 5, 1, 2, 3, 5, 4, 1, - 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 4, 1, 3, 1, 3, 1, 1, 1, 2, 1, + 1, 1, 3, 1, 2, 2, 4, 2, 3, 4, + 2, 3, 4, 0, 6, 2, 3, 2, 1, 1, + 2, 3, 3, 2, 3, 2, 1, 2, 1, 1, + 1, 3, 4, 6, 5, 1, 2, 3, 5, 4, + 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 4, 1, 3, 1, 3, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 4, 1, 1, 3, 2, 3, - 2, 3, 3, 4, 1, 0, 3, 1, 3, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 4, 1, 1, 3, 2, 3, 2, 3, 3, + 4, 1, 0, 3, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, @@ -3472,16 +3529,16 @@ static const yytype_uint8 yyr2[] = 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 0, 6, 0, 5, 1, 2, 3, 4, - 1, 3, 1, 2, 1, 3, 4, 1, 3, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 2, 2, 0, 0, 5, 1, 1, 0, 2, 0, - 2, 2, 3, 1, 2, 1, 2, 1, 2, 5, - 3, 1, 1, 4, 1, 2, 0, 8, 0, 1, - 3, 2, 1, 2, 0, 6, 0, 8, 0, 7, - 1, 1, 1, 0, 2, 3, 2, 2, 2, 3, - 2, 1, 2, 1, 1, 1, 0, 3, 5, 1, - 3, 1, 4 + 1, 0, 6, 0, 5, 1, 2, 3, 4, 1, + 3, 1, 2, 1, 3, 4, 1, 3, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, + 2, 0, 0, 5, 1, 1, 0, 2, 0, 2, + 2, 3, 1, 2, 1, 2, 1, 2, 5, 3, + 1, 1, 4, 1, 2, 0, 8, 0, 1, 3, + 2, 1, 2, 0, 6, 0, 8, 0, 7, 1, + 1, 1, 0, 2, 3, 2, 2, 2, 3, 2, + 1, 2, 1, 1, 1, 0, 3, 5, 1, 3, + 1, 4 }; @@ -4164,252 +4221,260 @@ yyreduce: switch (yyn) { case 2: -#line 352 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 357 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleVariable((yyvsp[0].lex).loc, (yyvsp[0].lex).symbol, (yyvsp[0].lex).string); } -#line 4172 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4229 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 3: -#line 358 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 363 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4180 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4237 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 4: -#line 361 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 366 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = (yyvsp[-1].interm.intermTypedNode); if ((yyval.interm.intermTypedNode)->getAsConstantUnion()) (yyval.interm.intermTypedNode)->getAsConstantUnion()->setExpression(); } -#line 4190 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4247 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 5: -#line 366 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 371 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtFloat, (yyvsp[0].lex).loc, true); } -#line 
4198 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4255 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 6: -#line 369 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 374 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i, (yyvsp[0].lex).loc, true); } -#line 4206 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4263 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 7: -#line 372 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 377 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned literal"); (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u, (yyvsp[0].lex).loc, true); } -#line 4215 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4272 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 8: -#line 376 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 381 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).b, (yyvsp[0].lex).loc, true); } -#line 4223 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4280 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 9: -#line 380 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 385 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).string, (yyvsp[0].lex).loc, true); + } +#line 4288 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 10: +#line 388 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed literal"); (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i, (yyvsp[0].lex).loc, true); } -#line 4232 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4297 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 10: -#line 384 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 11: +#line 392 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed literal"); (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u, (yyvsp[0].lex).loc, true); } -#line 4241 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4306 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 11: -#line 388 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 12: +#line 396 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer literal"); (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i64, (yyvsp[0].lex).loc, true); } -#line 4250 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4315 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 12: -#line 392 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 13: +#line 400 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer literal"); (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u64, (yyvsp[0].lex).loc, true); } -#line 4259 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4324 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 13: -#line 396 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 14: +#line 404 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitInt16Check((yyvsp[0].lex).loc, "16-bit integer literal"); (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((short)(yyvsp[0].lex).i, (yyvsp[0].lex).loc, true); } -#line 4268 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4333 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 14: -#line 400 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 15: +#line 408 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitInt16Check((yyvsp[0].lex).loc, "16-bit unsigned integer literal"); (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((unsigned short)(yyvsp[0].lex).u, (yyvsp[0].lex).loc, true); } -#line 4277 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4342 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 15: -#line 404 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 16: +#line 412 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double literal"); if (! parseContext.symbolTable.atBuiltInLevel()) parseContext.doubleCheck((yyvsp[0].lex).loc, "double literal"); (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtDouble, (yyvsp[0].lex).loc, true); } -#line 4286 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4353 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 16: -#line 408 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 17: +#line 418 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16Check((yyvsp[0].lex).loc, "half float literal"); (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtFloat16, (yyvsp[0].lex).loc, true); } -#line 4295 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 17: -#line 416 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - } -#line 4303 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4362 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 18: -#line 419 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBracketDereference((yyvsp[-2].lex).loc, (yyvsp[-3].interm.intermTypedNode), (yyvsp[-1].interm.intermTypedNode)); - } -#line 4311 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 19: -#line 422 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 426 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4319 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4370 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 19: +#line 429 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.intermTypedNode) = parseContext.handleBracketDereference((yyvsp[-2].lex).loc, (yyvsp[-3].interm.intermTypedNode), (yyvsp[-1].interm.intermTypedNode)); + } +#line 4378 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 20: -#line 425 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 432 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - 
(yyval.interm.intermTypedNode) = parseContext.handleDotDereference((yyvsp[0].lex).loc, (yyvsp[-2].interm.intermTypedNode), *(yyvsp[0].lex).string); + (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4327 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4386 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 21: -#line 428 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 435 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.intermTypedNode) = parseContext.handleDotDereference((yyvsp[0].lex).loc, (yyvsp[-2].interm.intermTypedNode), *(yyvsp[0].lex).string); + } +#line 4394 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 22: +#line 438 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.variableCheck((yyvsp[-1].interm.intermTypedNode)); parseContext.lValueErrorCheck((yyvsp[0].lex).loc, "++", (yyvsp[-1].interm.intermTypedNode)); (yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[0].lex).loc, "++", EOpPostIncrement, (yyvsp[-1].interm.intermTypedNode)); } -#line 4337 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4404 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 22: -#line 433 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 23: +#line 443 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.variableCheck((yyvsp[-1].interm.intermTypedNode)); parseContext.lValueErrorCheck((yyvsp[0].lex).loc, "--", (yyvsp[-1].interm.intermTypedNode)); (yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[0].lex).loc, "--", EOpPostDecrement, (yyvsp[-1].interm.intermTypedNode)); } -#line 4347 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4414 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 23: -#line 441 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 24: +#line 451 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.integerCheck((yyvsp[0].interm.intermTypedNode), "[]"); (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4356 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4423 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 24: -#line 448 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 25: +#line 458 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleFunctionCall((yyvsp[0].interm).loc, (yyvsp[0].interm).function, (yyvsp[0].interm).intermNode); delete (yyvsp[0].interm).function; } -#line 4365 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 25: -#line 455 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[0].interm); - } -#line 4373 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4432 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 26: -#line 461 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[-1].interm); - (yyval.interm).loc = (yyvsp[0].lex).loc; - } -#line 4382 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 27: #line 465 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[-1].interm); - (yyval.interm).loc = (yyvsp[0].lex).loc; - } -#line 4391 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 28: -#line 472 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[-1].interm); - } -#line 4399 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 29: -#line 475 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm) = (yyvsp[0].interm); } -#line 4407 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4440 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 27: +#line 471 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm) = (yyvsp[-1].interm); + (yyval.interm).loc = (yyvsp[0].lex).loc; + } +#line 4449 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 28: +#line 475 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm) = (yyvsp[-1].interm); + (yyval.interm).loc = (yyvsp[0].lex).loc; + } +#line 4458 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 29: +#line 482 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm) = (yyvsp[-1].interm); + } +#line 4466 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 30: -#line 481 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 485 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm) = (yyvsp[0].interm); + } +#line 4474 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 31: +#line 491 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { TParameter param = { 0, new TType }; param.type->shallowCopy((yyvsp[0].interm.intermTypedNode)->getType()); @@ -4417,11 +4482,11 @@ yyreduce: (yyval.interm).function = (yyvsp[-1].interm).function; (yyval.interm).intermNode = (yyvsp[0].interm.intermTypedNode); } -#line 4419 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4486 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 31: -#line 488 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 32: +#line 498 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { TParameter param = { 0, new TType }; param.type->shallowCopy((yyvsp[0].interm.intermTypedNode)->getType()); @@ -4429,29 +4494,29 @@ yyreduce: (yyval.interm).function = (yyvsp[-2].interm).function; (yyval.interm).intermNode = parseContext.intermediate.growAggregate((yyvsp[-2].interm).intermNode, (yyvsp[0].interm.intermTypedNode), (yyvsp[-1].lex).loc); } -#line 4431 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 32: -#line 498 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[-1].interm); - } -#line 4439 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4498 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 33: -#line 506 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 508 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm) = (yyvsp[-1].interm); + } +#line 4506 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 34: +#line 516 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // Constructor (yyval.interm).intermNode = 0; (yyval.interm).function = parseContext.handleConstructorCall((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type)); } -#line 4449 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4516 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 34: -#line 511 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 35: +#line 521 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // // Should be a method or subroutine call, but we haven't recognized the arguments yet. 
@@ -4479,50 +4544,50 @@ yyreduce: (yyval.interm).function = new TFunction(empty, TType(EbtVoid), EOpNull); } } -#line 4481 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4548 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 35: -#line 539 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 36: +#line 549 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // Constructor (yyval.interm).intermNode = 0; (yyval.interm).function = parseContext.handleConstructorCall((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type)); } -#line 4491 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4558 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 36: -#line 548 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 37: +#line 558 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.variableCheck((yyvsp[0].interm.intermTypedNode)); (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); if (TIntermMethod* method = (yyvsp[0].interm.intermTypedNode)->getAsMethodNode()) parseContext.error((yyvsp[0].interm.intermTypedNode)->getLoc(), "incomplete method syntax", method->getMethodName().c_str(), ""); } -#line 4502 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4569 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 37: -#line 554 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 38: +#line 564 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.lValueErrorCheck((yyvsp[-1].lex).loc, "++", (yyvsp[0].interm.intermTypedNode)); (yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[-1].lex).loc, "++", EOpPreIncrement, (yyvsp[0].interm.intermTypedNode)); } -#line 4511 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4578 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 38: -#line 558 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 39: +#line 568 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.lValueErrorCheck((yyvsp[-1].lex).loc, "--", (yyvsp[0].interm.intermTypedNode)); (yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[-1].lex).loc, "--", EOpPreDecrement, (yyvsp[0].interm.intermTypedNode)); } -#line 4520 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4587 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 39: -#line 562 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 40: +#line 572 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if ((yyvsp[-1].interm).op != EOpNull) { char errorOp[2] = {0, 0}; @@ -4539,179 +4604,179 @@ yyreduce: (yyval.interm.intermTypedNode)->getAsConstantUnion()->setExpression(); } } -#line 4541 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 40: -#line 582 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpNull; } -#line 4547 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4608 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 41: -#line 583 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpNegative; } -#line 4553 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 592 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpNull; } +#line 4614 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 42: -#line 584 "MachineIndependent/glslang.y" /* yacc.c:1646 */ 
- { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpLogicalNot; } -#line 4559 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 593 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpNegative; } +#line 4620 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 43: -#line 585 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpBitwiseNot; - parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise not"); } -#line 4566 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 594 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpLogicalNot; } +#line 4626 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 44: -#line 591 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4572 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 595 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpBitwiseNot; + parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise not"); } +#line 4633 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 45: -#line 592 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 601 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4639 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 46: +#line 602 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "*", EOpMul, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); } -#line 4582 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4649 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 46: -#line 597 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 47: +#line 607 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "/", EOpDiv, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); } -#line 4592 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4659 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 47: -#line 602 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 48: +#line 612 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "%"); (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "%", EOpMod, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); } -#line 4603 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 48: -#line 611 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4609 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4670 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 
49: -#line 612 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 621 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4676 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 50: +#line 622 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "+", EOpAdd, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); } -#line 4619 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4686 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 50: -#line 617 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 51: +#line 627 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "-", EOpSub, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); } -#line 4629 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 51: -#line 625 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4635 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4696 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 52: -#line 626 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 635 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4702 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 53: +#line 636 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bit shift left"); (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "<<", EOpLeftShift, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); } -#line 4646 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4713 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 53: -#line 632 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 54: +#line 642 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bit shift right"); (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, ">>", EOpRightShift, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); } -#line 4657 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 54: -#line 641 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4663 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4724 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 55: -#line 642 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 651 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4730 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + 
break; + + case 56: +#line 652 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "<", EOpLessThan, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); } -#line 4673 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4740 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 56: -#line 647 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 57: +#line 657 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, ">", EOpGreaterThan, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); } -#line 4683 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4750 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 57: -#line 652 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 58: +#line 662 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "<=", EOpLessThanEqual, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); } -#line 4693 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4760 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 58: -#line 657 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 59: +#line 667 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, ">=", EOpGreaterThanEqual, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); } -#line 4703 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 59: -#line 665 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4709 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4770 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 60: -#line 666 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 675 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4776 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 61: +#line 676 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.arrayObjectCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "array comparison"); parseContext.opaqueCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "=="); @@ -4721,11 +4786,11 @@ yyreduce: if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); } -#line 4723 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4790 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 61: -#line 675 
"MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 62: +#line 685 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.arrayObjectCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "array comparison"); parseContext.opaqueCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "!="); @@ -4735,124 +4800,124 @@ yyreduce: if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); } -#line 4737 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 62: -#line 687 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4743 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4804 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 63: -#line 688 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 697 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4810 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 64: +#line 698 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bitwise and"); (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "&", EOpAnd, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); } -#line 4754 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 64: -#line 697 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4760 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4821 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 65: -#line 698 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 707 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4827 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 66: +#line 708 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bitwise exclusive or"); (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "^", EOpExclusiveOr, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); } -#line 4771 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 66: -#line 707 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4777 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4838 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 67: -#line 708 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 717 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4844 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 68: +#line 718 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bitwise inclusive or"); (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, 
"|", EOpInclusiveOr, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); } -#line 4788 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 68: -#line 717 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4794 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4855 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 69: -#line 718 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 727 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4861 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 70: +#line 728 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "&&", EOpLogicalAnd, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); } -#line 4804 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 70: -#line 726 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4810 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4871 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 71: -#line 727 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 736 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4877 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 72: +#line 737 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "^^", EOpLogicalXor, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); } -#line 4820 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 72: -#line 735 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4826 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4887 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 73: -#line 736 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 745 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4893 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 74: +#line 746 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "||", EOpLogicalOr, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); } -#line 4836 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 74: -#line 744 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); 
} -#line 4842 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4903 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 75: -#line 745 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - ++parseContext.controlFlowNestingLevel; - } -#line 4850 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 754 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4909 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 76: -#line 748 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 755 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + ++parseContext.controlFlowNestingLevel; + } +#line 4917 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 77: +#line 758 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { --parseContext.controlFlowNestingLevel; parseContext.boolCheck((yyvsp[-4].lex).loc, (yyvsp[-5].interm.intermTypedNode)); @@ -4865,17 +4930,17 @@ yyreduce: (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } } -#line 4867 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 77: -#line 763 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4873 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4934 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 78: -#line 764 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 773 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } +#line 4940 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 79: +#line 774 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.arrayObjectCheck((yyvsp[-1].interm).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "array assignment"); parseContext.opaqueCheck((yyvsp[-1].interm).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "="); @@ -4883,125 +4948,125 @@ yyreduce: parseContext.specializationCheck((yyvsp[-1].interm).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "="); parseContext.lValueErrorCheck((yyvsp[-1].interm).loc, "assign", (yyvsp[-2].interm.intermTypedNode)); parseContext.rValueErrorCheck((yyvsp[-1].interm).loc, "assign", (yyvsp[0].interm.intermTypedNode)); - (yyval.interm.intermTypedNode) = parseContext.intermediate.addAssign((yyvsp[-1].interm).op, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode), (yyvsp[-1].interm).loc); + (yyval.interm.intermTypedNode) = parseContext.addAssign((yyvsp[-1].interm).loc, (yyvsp[-1].interm).op, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); if ((yyval.interm.intermTypedNode) == 0) { parseContext.assignError((yyvsp[-1].interm).loc, "assign", (yyvsp[-2].interm.intermTypedNode)->getCompleteString(), (yyvsp[0].interm.intermTypedNode)->getCompleteString()); (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); } } -#line 4891 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4958 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 79: -#line 780 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 80: +#line 790 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpAssign; } -#line 4900 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4967 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 80: 
-#line 784 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 81: +#line 794 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpMulAssign; } -#line 4909 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4976 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 81: -#line 788 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 82: +#line 798 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpDivAssign; } -#line 4918 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4985 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 82: -#line 792 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 83: +#line 802 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "%="); (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpModAssign; } -#line 4928 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 4995 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 83: -#line 797 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 84: +#line 807 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpAddAssign; } -#line 4937 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5004 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 84: -#line 801 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 85: +#line 811 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpSubAssign; } -#line 4946 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5013 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 85: -#line 805 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 86: +#line 815 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bit-shift left assign"); (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpLeftShiftAssign; } -#line 4955 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5022 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 86: -#line 809 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 87: +#line 819 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bit-shift right assign"); (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpRightShiftAssign; } -#line 4964 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5031 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 87: -#line 813 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 88: +#line 823 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise-and assign"); (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpAndAssign; } -#line 4973 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5040 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 88: -#line 817 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 89: +#line 827 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise-xor assign"); (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpExclusiveOrAssign; } -#line 4982 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ 
+#line 5049 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 89: -#line 821 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 90: +#line 831 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise-or assign"); (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpInclusiveOrAssign; } -#line 4991 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 90: -#line 828 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - } -#line 4999 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5058 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 91: -#line 831 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 838 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); + } +#line 5066 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 92: +#line 841 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.samplerConstructorLocationCheck((yyvsp[-1].lex).loc, ",", (yyvsp[0].interm.intermTypedNode)); (yyval.interm.intermTypedNode) = parseContext.intermediate.addComma((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode), (yyvsp[-1].lex).loc); @@ -5010,40 +5075,40 @@ yyreduce: (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } } -#line 5012 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5079 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 92: -#line 842 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 93: +#line 852 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.constantValueCheck((yyvsp[0].interm.intermTypedNode), ""); (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 5021 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5088 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 93: -#line 849 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 94: +#line 859 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.handleFunctionDeclarator((yyvsp[-1].interm).loc, *(yyvsp[-1].interm).function, true /* prototype */); (yyval.interm.intermNode) = 0; // TODO: 4.0 functionality: subroutines: make the identifier a user type for this signature } -#line 5031 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5098 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 94: -#line 854 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 95: +#line 864 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if ((yyvsp[-1].interm).intermNode && (yyvsp[-1].interm).intermNode->getAsAggregate()) (yyvsp[-1].interm).intermNode->getAsAggregate()->setOperator(EOpSequence); (yyval.interm.intermNode) = (yyvsp[-1].interm).intermNode; } -#line 5041 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5108 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 95: -#line 859 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 96: +#line 869 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.profileRequires((yyvsp[-3].lex).loc, ENoProfile, 130, 0, "precision statement"); // lazy setting of the previous scope's defaults, has effect only the first time it is called in a particular scope @@ -5051,75 +5116,75 @@ yyreduce: parseContext.setDefaultPrecision((yyvsp[-3].lex).loc, (yyvsp[-1].interm.type), 
(yyvsp[-2].interm.type).qualifier.precision); (yyval.interm.intermNode) = 0; } -#line 5053 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5120 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 96: -#line 866 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 97: +#line 876 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.declareBlock((yyvsp[-1].interm).loc, *(yyvsp[-1].interm).typeList); (yyval.interm.intermNode) = 0; } -#line 5062 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5129 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 97: -#line 870 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 98: +#line 880 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.declareBlock((yyvsp[-2].interm).loc, *(yyvsp[-2].interm).typeList, (yyvsp[-1].lex).string); (yyval.interm.intermNode) = 0; } -#line 5071 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5138 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 98: -#line 874 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 99: +#line 884 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.declareBlock((yyvsp[-3].interm).loc, *(yyvsp[-3].interm).typeList, (yyvsp[-2].lex).string, (yyvsp[-1].interm).arraySizes); (yyval.interm.intermNode) = 0; } -#line 5080 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5147 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 99: -#line 878 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 100: +#line 888 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalQualifierFixCheck((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier); parseContext.updateStandaloneQualifierDefaults((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type)); (yyval.interm.intermNode) = 0; } -#line 5090 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5157 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 100: -#line 883 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 101: +#line 893 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.checkNoShaderLayouts((yyvsp[-2].interm.type).loc, (yyvsp[-2].interm.type).shaderQualifiers); parseContext.addQualifierToExisting((yyvsp[-2].interm.type).loc, (yyvsp[-2].interm.type).qualifier, *(yyvsp[-1].lex).string); (yyval.interm.intermNode) = 0; } -#line 5100 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5167 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 101: -#line 888 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 102: +#line 898 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.checkNoShaderLayouts((yyvsp[-3].interm.type).loc, (yyvsp[-3].interm.type).shaderQualifiers); (yyvsp[-1].interm.identifierList)->push_back((yyvsp[-2].lex).string); parseContext.addQualifierToExisting((yyvsp[-3].interm.type).loc, (yyvsp[-3].interm.type).qualifier, *(yyvsp[-1].interm.identifierList)); (yyval.interm.intermNode) = 0; } -#line 5111 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 102: -#line 897 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { parseContext.nestedBlockCheck((yyvsp[-2].interm.type).loc); } -#line 5117 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5178 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 103: -#line 897 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 907 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { 
parseContext.nestedBlockCheck((yyvsp[-2].interm.type).loc); } +#line 5184 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 104: +#line 907 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { --parseContext.structNestingLevel; parseContext.blockName = (yyvsp[-4].lex).string; @@ -5129,54 +5194,54 @@ yyreduce: (yyval.interm).loc = (yyvsp[-5].interm.type).loc; (yyval.interm).typeList = (yyvsp[-1].interm.typeList); } -#line 5131 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5198 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 104: -#line 908 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 105: +#line 918 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.identifierList) = new TIdentifierList; (yyval.interm.identifierList)->push_back((yyvsp[0].lex).string); } -#line 5140 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5207 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 105: -#line 912 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 106: +#line 922 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.identifierList) = (yyvsp[-2].interm.identifierList); (yyval.interm.identifierList)->push_back((yyvsp[0].lex).string); } -#line 5149 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5216 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 106: -#line 919 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 107: +#line 929 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).function = (yyvsp[-1].interm.function); (yyval.interm).loc = (yyvsp[0].lex).loc; } -#line 5158 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 107: -#line 926 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.function) = (yyvsp[0].interm.function); - } -#line 5166 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5225 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 108: -#line 929 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 936 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.function) = (yyvsp[0].interm.function); } -#line 5174 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5233 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 109: -#line 936 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 939 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.function) = (yyvsp[0].interm.function); + } +#line 5241 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 110: +#line 946 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // Add the parameter (yyval.interm.function) = (yyvsp[-1].interm.function); @@ -5185,11 +5250,11 @@ yyreduce: else delete (yyvsp[0].interm).param.type; } -#line 5187 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5254 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 110: -#line 944 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 111: +#line 954 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // // Only first parameter of one-parameter functions can be void @@ -5207,11 +5272,11 @@ yyreduce: (yyvsp[-2].interm.function)->addParameter((yyvsp[0].interm).param); } } -#line 5209 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5276 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 111: -#line 964 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 112: +#line 974 
"MachineIndependent/glslang.y" /* yacc.c:1646 */ { if ((yyvsp[-2].interm.type).qualifier.storage != EvqGlobal && (yyvsp[-2].interm.type).qualifier.storage != EvqTemporary) { parseContext.error((yyvsp[-1].lex).loc, "no qualifiers allowed for function return", @@ -5231,11 +5296,11 @@ yyreduce: function = new TFunction((yyvsp[-1].lex).string, type); (yyval.interm.function) = function; } -#line 5233 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5300 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 112: -#line 987 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 113: +#line 997 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if ((yyvsp[-1].interm.type).arraySizes) { parseContext.profileRequires((yyvsp[-1].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); @@ -5251,11 +5316,11 @@ yyreduce: (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).param = param; } -#line 5253 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5320 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 113: -#line 1002 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 114: +#line 1012 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if ((yyvsp[-2].interm.type).arraySizes) { parseContext.profileRequires((yyvsp[-2].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); @@ -5275,11 +5340,11 @@ yyreduce: (yyval.interm).loc = (yyvsp[-1].lex).loc; (yyval.interm).param = param; } -#line 5277 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5344 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 114: -#line 1027 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 115: +#line 1037 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm) = (yyvsp[0].interm); if ((yyvsp[-1].interm.type).qualifier.precision != EpqNone) @@ -5291,11 +5356,11 @@ yyreduce: parseContext.paramCheckFix((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier, *(yyval.interm).param.type); } -#line 5293 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5360 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 115: -#line 1038 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 116: +#line 1048 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm) = (yyvsp[0].interm); @@ -5303,11 +5368,11 @@ yyreduce: parseContext.paramCheckFixStorage((yyvsp[0].interm).loc, EvqTemporary, *(yyval.interm).param.type); parseContext.precisionQualifierCheck((yyval.interm).loc, (yyval.interm).param.type->getBasicType(), (yyval.interm).param.type->getQualifier()); } -#line 5305 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5372 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 116: -#line 1048 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 117: +#line 1058 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm) = (yyvsp[0].interm); if ((yyvsp[-1].interm.type).qualifier.precision != EpqNone) @@ -5318,11 +5383,11 @@ yyreduce: parseContext.parameterTypeCheck((yyvsp[0].interm).loc, (yyvsp[-1].interm.type).qualifier.storage, *(yyval.interm).param.type); parseContext.paramCheckFix((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier, *(yyval.interm).param.type); } -#line 5320 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5387 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 117: -#line 1058 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 118: +#line 1068 
"MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm) = (yyvsp[0].interm); @@ -5330,68 +5395,68 @@ yyreduce: parseContext.paramCheckFixStorage((yyvsp[0].interm).loc, EvqTemporary, *(yyval.interm).param.type); parseContext.precisionQualifierCheck((yyval.interm).loc, (yyval.interm).param.type->getBasicType(), (yyval.interm).param.type->getQualifier()); } -#line 5332 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5399 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 118: -#line 1068 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 119: +#line 1078 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { TParameter param = { 0, new TType((yyvsp[0].interm.type)) }; (yyval.interm).param = param; if ((yyvsp[0].interm.type).arraySizes) parseContext.arraySizeRequiredCheck((yyvsp[0].interm.type).loc, *(yyvsp[0].interm.type).arraySizes); } -#line 5343 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 119: -#line 1077 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[0].interm); - } -#line 5351 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5410 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 120: -#line 1080 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1087 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm) = (yyvsp[0].interm); + } +#line 5418 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 121: +#line 1090 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm) = (yyvsp[-2].interm); parseContext.declareVariable((yyvsp[0].lex).loc, *(yyvsp[0].lex).string, (yyvsp[-2].interm).type); } -#line 5360 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5427 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 121: -#line 1084 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 122: +#line 1094 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm) = (yyvsp[-3].interm); parseContext.declareVariable((yyvsp[-1].lex).loc, *(yyvsp[-1].lex).string, (yyvsp[-3].interm).type, (yyvsp[0].interm).arraySizes); } -#line 5369 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5436 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 122: -#line 1088 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 123: +#line 1098 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).type = (yyvsp[-5].interm).type; TIntermNode* initNode = parseContext.declareVariable((yyvsp[-3].lex).loc, *(yyvsp[-3].lex).string, (yyvsp[-5].interm).type, (yyvsp[-2].interm).arraySizes, (yyvsp[0].interm.intermTypedNode)); (yyval.interm).intermNode = parseContext.intermediate.growAggregate((yyvsp[-5].interm).intermNode, initNode, (yyvsp[-1].lex).loc); } -#line 5379 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5446 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 123: -#line 1093 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 124: +#line 1103 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).type = (yyvsp[-4].interm).type; TIntermNode* initNode = parseContext.declareVariable((yyvsp[-2].lex).loc, *(yyvsp[-2].lex).string, (yyvsp[-4].interm).type, 0, (yyvsp[0].interm.intermTypedNode)); (yyval.interm).intermNode = parseContext.intermediate.growAggregate((yyvsp[-4].interm).intermNode, initNode, (yyvsp[-1].lex).loc); } -#line 5389 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5456 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 124: -#line 1101 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 125: +#line 1111 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).type = (yyvsp[0].interm.type); (yyval.interm).intermNode = 0; @@ -5399,51 +5464,51 @@ yyreduce: parseContext.declareTypeDefaults((yyval.interm).loc, (yyval.interm).type); } -#line 5401 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5468 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 125: -#line 1108 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 126: +#line 1118 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).type = (yyvsp[-1].interm.type); (yyval.interm).intermNode = 0; parseContext.declareVariable((yyvsp[0].lex).loc, *(yyvsp[0].lex).string, (yyvsp[-1].interm.type)); } -#line 5411 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5478 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 126: -#line 1113 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 127: +#line 1123 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).type = (yyvsp[-2].interm.type); (yyval.interm).intermNode = 0; parseContext.declareVariable((yyvsp[-1].lex).loc, *(yyvsp[-1].lex).string, (yyvsp[-2].interm.type), (yyvsp[0].interm).arraySizes); } -#line 5421 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5488 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 127: -#line 1118 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 128: +#line 1128 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).type = (yyvsp[-4].interm.type); TIntermNode* initNode = parseContext.declareVariable((yyvsp[-3].lex).loc, *(yyvsp[-3].lex).string, (yyvsp[-4].interm.type), (yyvsp[-2].interm).arraySizes, (yyvsp[0].interm.intermTypedNode)); (yyval.interm).intermNode = parseContext.intermediate.growAggregate(0, initNode, (yyvsp[-1].lex).loc); } -#line 5431 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5498 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 128: -#line 1123 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 129: +#line 1133 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).type = (yyvsp[-3].interm.type); TIntermNode* initNode = parseContext.declareVariable((yyvsp[-2].lex).loc, *(yyvsp[-2].lex).string, (yyvsp[-3].interm.type), 0, (yyvsp[0].interm.intermTypedNode)); (yyval.interm).intermNode = parseContext.intermediate.growAggregate(0, initNode, (yyvsp[-1].lex).loc); } -#line 5441 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5508 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 129: -#line 1132 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 130: +#line 1142 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type) = (yyvsp[0].interm.type); @@ -5454,11 +5519,11 @@ yyreduce: } parseContext.precisionQualifierCheck((yyval.interm.type).loc, (yyval.interm.type).basicType, (yyval.interm.type).qualifier); } -#line 5456 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5523 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 130: -#line 1142 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 131: +#line 1152 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalQualifierFixCheck((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier); 
parseContext.globalQualifierTypeCheck((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier, (yyvsp[0].interm.type)); @@ -5483,22 +5548,22 @@ yyreduce: (parseContext.language == EShLangFragment && (yyval.interm.type).qualifier.storage == EvqVaryingIn))) (yyval.interm.type).qualifier.smooth = true; } -#line 5485 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5552 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 131: -#line 1169 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 132: +#line 1179 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "invariant"); parseContext.profileRequires((yyval.interm.type).loc, ENoProfile, 120, 0, "invariant"); (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.invariant = true; } -#line 5496 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5563 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 132: -#line 1178 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 133: +#line 1188 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "smooth"); parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "smooth"); @@ -5506,11 +5571,11 @@ yyreduce: (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.smooth = true; } -#line 5508 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5575 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 133: -#line 1185 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 134: +#line 1195 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "flat"); parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "flat"); @@ -5518,11 +5583,11 @@ yyreduce: (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.flat = true; } -#line 5520 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5587 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 134: -#line 1193 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 135: +#line 1203 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "noperspective"); parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 0, E_GL_NV_shader_noperspective_interpolation, "noperspective"); @@ -5530,11 +5595,11 @@ yyreduce: (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.nopersp = true; } -#line 5532 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5599 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 135: -#line 1200 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 136: +#line 1210 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "__explicitInterpAMD"); parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 450, E_GL_AMD_shader_explicit_vertex_parameter, "explicit interpolation"); @@ -5542,11 +5607,11 @@ yyreduce: (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.explicitInterp = true; } -#line 5544 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5611 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 136: -#line 1207 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 137: +#line 1217 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "pervertexNV"); 
parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 0, E_GL_NV_fragment_shader_barycentric, "fragment shader barycentric"); @@ -5555,11 +5620,11 @@ yyreduce: (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.pervertexNV = true; } -#line 5557 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5624 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 137: -#line 1215 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 138: +#line 1225 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // No need for profile version or extension check. Shader stage already checks both. parseContext.globalCheck((yyvsp[0].lex).loc, "perprimitiveNV"); @@ -5570,11 +5635,11 @@ yyreduce: (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.perPrimitiveNV = true; } -#line 5572 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5639 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 138: -#line 1225 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 139: +#line 1235 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // No need for profile version or extension check. Shader stage already checks both. parseContext.globalCheck((yyvsp[0].lex).loc, "perviewNV"); @@ -5582,11 +5647,11 @@ yyreduce: (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.perViewNV = true; } -#line 5584 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5651 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 139: -#line 1232 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 140: +#line 1242 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // No need for profile version or extension check. Shader stage already checks both. parseContext.globalCheck((yyvsp[0].lex).loc, "taskNV"); @@ -5594,84 +5659,84 @@ yyreduce: (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.perTaskNV = true; } -#line 5596 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 140: -#line 1243 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[-1].interm.type); - } -#line 5604 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5663 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 141: -#line 1249 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1253 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - (yyval.interm.type) = (yyvsp[0].interm.type); + (yyval.interm.type) = (yyvsp[-1].interm.type); } -#line 5612 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5671 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 142: -#line 1252 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1259 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type) = (yyvsp[0].interm.type); + } +#line 5679 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 143: +#line 1262 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type) = (yyvsp[-2].interm.type); (yyval.interm.type).shaderQualifiers.merge((yyvsp[0].interm.type).shaderQualifiers); parseContext.mergeObjectLayoutQualifiers((yyval.interm.type).qualifier, (yyvsp[0].interm.type).qualifier, false); } -#line 5622 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5689 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 143: -#line 1259 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 144: +#line 1269 "MachineIndependent/glslang.y" /* 
yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); parseContext.setLayoutQualifier((yyvsp[0].lex).loc, (yyval.interm.type), *(yyvsp[0].lex).string); } -#line 5631 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5698 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 144: -#line 1263 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 145: +#line 1273 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[-2].lex).loc); parseContext.setLayoutQualifier((yyvsp[-2].lex).loc, (yyval.interm.type), *(yyvsp[-2].lex).string, (yyvsp[0].interm.intermTypedNode)); } -#line 5640 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5707 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 145: -#line 1267 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 146: +#line 1277 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // because "shared" is both an identifier and a keyword (yyval.interm.type).init((yyvsp[0].lex).loc); TString strShared("shared"); parseContext.setLayoutQualifier((yyvsp[0].lex).loc, (yyval.interm.type), strShared); } -#line 5650 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5717 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 146: -#line 1276 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 147: +#line 1286 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.profileRequires((yyval.interm.type).loc, ECoreProfile | ECompatibilityProfile, 400, E_GL_ARB_gpu_shader5, "precise"); parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 320, Num_AEP_gpu_shader5, AEP_gpu_shader5, "precise"); (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.noContraction = true; } -#line 5661 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 147: -#line 1286 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[0].interm.type); - } -#line 5669 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5728 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 148: -#line 1289 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1296 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type) = (yyvsp[0].interm.type); + } +#line 5736 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 149: +#line 1299 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type) = (yyvsp[-1].interm.type); if ((yyval.interm.type).basicType == EbtVoid) @@ -5680,112 +5745,112 @@ yyreduce: (yyval.interm.type).shaderQualifiers.merge((yyvsp[0].interm.type).shaderQualifiers); parseContext.mergeQualifiers((yyval.interm.type).loc, (yyval.interm.type).qualifier, (yyvsp[0].interm.type).qualifier, false); } -#line 5682 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 149: -#line 1300 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[0].interm.type); - } -#line 5690 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5749 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 150: -#line 1303 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1310 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type) = (yyvsp[0].interm.type); } -#line 5698 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5757 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 151: -#line 1306 "MachineIndependent/glslang.y" /* yacc.c:1646 */ 
+#line 1313 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type) = (yyvsp[0].interm.type); + } +#line 5765 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 152: +#line 1316 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.checkPrecisionQualifier((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type).qualifier.precision); (yyval.interm.type) = (yyvsp[0].interm.type); } -#line 5707 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 152: -#line 1310 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // allow inheritance of storage qualifier from block declaration - (yyval.interm.type) = (yyvsp[0].interm.type); - } -#line 5716 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5774 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 153: -#line 1314 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1320 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // allow inheritance of storage qualifier from block declaration (yyval.interm.type) = (yyvsp[0].interm.type); } -#line 5725 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5783 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 154: -#line 1319 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1324 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // allow inheritance of storage qualifier from block declaration (yyval.interm.type) = (yyvsp[0].interm.type); } -#line 5734 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5792 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 155: -#line 1323 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1329 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { + // allow inheritance of storage qualifier from block declaration (yyval.interm.type) = (yyvsp[0].interm.type); } -#line 5742 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5801 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 156: -#line 1330 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1333 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type) = (yyvsp[0].interm.type); + } +#line 5809 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 157: +#line 1340 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.storage = EvqConst; // will later turn into EvqConstReadOnly, if the initializer is not constant } -#line 5751 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5818 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 157: -#line 1334 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 158: +#line 1344 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "inout"); (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.storage = EvqInOut; } -#line 5761 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5828 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 158: -#line 1339 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 159: +#line 1349 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "in"); (yyval.interm.type).init((yyvsp[0].lex).loc); // whether this is a parameter "in" or a pipeline "in" will get sorted out a bit later (yyval.interm.type).qualifier.storage = EvqIn; } -#line 5772 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 
*/ +#line 5839 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 159: -#line 1345 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 160: +#line 1355 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "out"); (yyval.interm.type).init((yyvsp[0].lex).loc); // whether this is a parameter "out" or a pipeline "out" will get sorted out a bit later (yyval.interm.type).qualifier.storage = EvqOut; } -#line 5783 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5850 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 160: -#line 1351 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 161: +#line 1361 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 120, 0, "centroid"); parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 300, 0, "centroid"); @@ -5793,21 +5858,21 @@ yyreduce: (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.centroid = true; } -#line 5795 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5862 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 161: -#line 1358 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 162: +#line 1368 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "uniform"); (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.storage = EvqUniform; } -#line 5805 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5872 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 162: -#line 1363 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 163: +#line 1373 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "shared"); parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, 430, E_GL_ARB_compute_shader, "shared"); @@ -5816,21 +5881,21 @@ yyreduce: (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.storage = EvqShared; } -#line 5818 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5885 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 163: -#line 1371 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 164: +#line 1381 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "buffer"); (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.storage = EvqBuffer; } -#line 5828 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5895 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 164: -#line 1377 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 165: +#line 1387 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireStage((yyvsp[0].lex).loc, EShLangVertex, "attribute"); parseContext.checkDeprecated((yyvsp[0].lex).loc, ECoreProfile, 130, "attribute"); @@ -5843,11 +5908,11 @@ yyreduce: (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.storage = EvqVaryingIn; } -#line 5845 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5912 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 165: -#line 1389 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 166: +#line 1399 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.checkDeprecated((yyvsp[0].lex).loc, ENoProfile, 130, "varying"); parseContext.checkDeprecated((yyvsp[0].lex).loc, ECoreProfile, 
130, "varying"); @@ -5862,250 +5927,324 @@ yyreduce: else (yyval.interm.type).qualifier.storage = EvqVaryingIn; } -#line 5864 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5931 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 166: -#line 1403 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 167: +#line 1413 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "patch"); parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangTessControlMask | EShLangTessEvaluationMask), "patch"); (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.patch = true; } -#line 5875 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5942 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 167: -#line 1409 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 168: +#line 1419 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.globalCheck((yyvsp[0].lex).loc, "sample"); (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.sample = true; } -#line 5885 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 168: -#line 1414 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "hitAttributeNV"); - parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangIntersectNVMask | EShLangClosestHitNVMask - | EShLangAnyHitNVMask), "hitAttributeNV"); - parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "hitAttributeNV"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.storage = EvqHitAttrNV; - } -#line 5898 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5952 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 169: -#line 1422 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1424 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.globalCheck((yyvsp[0].lex).loc, "rayPayloadNV"); - parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangRayGenNVMask | EShLangClosestHitNVMask | - EShLangAnyHitNVMask | EShLangMissNVMask), "rayPayloadNV"); - parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "rayPayloadNV"); + parseContext.globalCheck((yyvsp[0].lex).loc, "hitAttributeNV"); + parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangIntersectMask | EShLangClosestHitMask + | EShLangAnyHitMask), "hitAttributeNV"); + parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "hitAttributeNV"); (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.storage = EvqPayloadNV; + (yyval.interm.type).qualifier.storage = EvqHitAttr; } -#line 5911 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5965 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 170: -#line 1430 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1432 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.globalCheck((yyvsp[0].lex).loc, "rayPayloadInNV"); - parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangClosestHitNVMask | - EShLangAnyHitNVMask | EShLangMissNVMask), "rayPayloadInNV"); - parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "rayPayloadInNV"); + parseContext.globalCheck((yyvsp[0].lex).loc, "hitAttributeEXT"); + parseContext.requireStage((yyvsp[0].lex).loc, 
(EShLanguageMask)(EShLangIntersectMask | EShLangClosestHitMask + | EShLangAnyHitMask), "hitAttributeEXT"); + parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_EXT_ray_tracing, "hitAttributeNV"); (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.storage = EvqPayloadInNV; + (yyval.interm.type).qualifier.storage = EvqHitAttr; } -#line 5924 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5978 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 171: -#line 1438 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1440 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.globalCheck((yyvsp[0].lex).loc, "callableDataNV"); - parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangRayGenNVMask | - EShLangClosestHitNVMask | EShLangMissNVMask | EShLangCallableNVMask), "callableDataNV"); - parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "callableDataNV"); + parseContext.globalCheck((yyvsp[0].lex).loc, "rayPayloadNV"); + parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangRayGenMask | EShLangClosestHitMask | + EShLangAnyHitMask | EShLangMissMask), "rayPayloadNV"); + parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "rayPayloadNV"); (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.storage = EvqCallableDataNV; + (yyval.interm.type).qualifier.storage = EvqPayload; } -#line 5937 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 5991 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 172: -#line 1446 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1448 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.globalCheck((yyvsp[0].lex).loc, "callableDataInNV"); - parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangCallableNVMask), "callableDataInNV"); - parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "callableDataInNV"); + parseContext.globalCheck((yyvsp[0].lex).loc, "rayPayloadEXT"); + parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangRayGenMask | EShLangClosestHitMask | + EShLangAnyHitMask | EShLangMissMask), "rayPayloadEXT"); + parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_EXT_ray_tracing, "rayPayloadEXT"); (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.storage = EvqCallableDataInNV; + (yyval.interm.type).qualifier.storage = EvqPayload; } -#line 5949 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6004 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 173: -#line 1453 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1456 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.globalCheck((yyvsp[0].lex).loc, "rayPayloadInNV"); + parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangClosestHitMask | + EShLangAnyHitMask | EShLangMissMask), "rayPayloadInNV"); + parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "rayPayloadInNV"); + (yyval.interm.type).init((yyvsp[0].lex).loc); + (yyval.interm.type).qualifier.storage = EvqPayloadIn; + } +#line 6017 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 174: +#line 1464 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.globalCheck((yyvsp[0].lex).loc, "rayPayloadInEXT"); + 
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangClosestHitMask | + EShLangAnyHitMask | EShLangMissMask), "rayPayloadInEXT"); + parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_EXT_ray_tracing, "rayPayloadInEXT"); + (yyval.interm.type).init((yyvsp[0].lex).loc); + (yyval.interm.type).qualifier.storage = EvqPayloadIn; + } +#line 6030 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 175: +#line 1472 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.globalCheck((yyvsp[0].lex).loc, "callableDataNV"); + parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangRayGenMask | + EShLangClosestHitMask | EShLangMissMask | EShLangCallableMask), "callableDataNV"); + parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "callableDataNV"); + (yyval.interm.type).init((yyvsp[0].lex).loc); + (yyval.interm.type).qualifier.storage = EvqCallableData; + } +#line 6043 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 176: +#line 1480 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.globalCheck((yyvsp[0].lex).loc, "callableDataEXT"); + parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangRayGenMask | + EShLangClosestHitMask | EShLangMissMask | EShLangCallableMask), "callableDataEXT"); + parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_EXT_ray_tracing, "callableDataEXT"); + (yyval.interm.type).init((yyvsp[0].lex).loc); + (yyval.interm.type).qualifier.storage = EvqCallableData; + } +#line 6056 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 177: +#line 1488 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.globalCheck((yyvsp[0].lex).loc, "callableDataInNV"); + parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangCallableMask), "callableDataInNV"); + parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "callableDataInNV"); + (yyval.interm.type).init((yyvsp[0].lex).loc); + (yyval.interm.type).qualifier.storage = EvqCallableDataIn; + } +#line 6068 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 178: +#line 1495 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.globalCheck((yyvsp[0].lex).loc, "callableDataInEXT"); + parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangCallableMask), "callableDataInEXT"); + parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_EXT_ray_tracing, "callableDataInEXT"); + (yyval.interm.type).init((yyvsp[0].lex).loc); + (yyval.interm.type).qualifier.storage = EvqCallableDataIn; + } +#line 6080 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 179: +#line 1502 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.coherent = true; } -#line 5958 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6089 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 174: -#line 1457 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 180: +#line 1506 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "devicecoherent"); (yyval.interm.type).qualifier.devicecoherent = true; } -#line 5968 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6099 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 175: -#line 1462 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 181: +#line 1511 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "queuefamilycoherent"); (yyval.interm.type).qualifier.queuefamilycoherent = true; } -#line 5978 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6109 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 176: -#line 1467 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 182: +#line 1516 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "workgroupcoherent"); (yyval.interm.type).qualifier.workgroupcoherent = true; } -#line 5988 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6119 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 177: -#line 1472 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 183: +#line 1521 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "subgroupcoherent"); (yyval.interm.type).qualifier.subgroupcoherent = true; } -#line 5998 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6129 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 178: -#line 1477 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 184: +#line 1526 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "nonprivate"); (yyval.interm.type).qualifier.nonprivate = true; } -#line 6008 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6139 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 179: -#line 1482 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 185: +#line 1531 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type).init((yyvsp[0].lex).loc); + parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_EXT_ray_tracing, "shadercallcoherent"); + (yyval.interm.type).qualifier.shadercallcoherent = true; + } +#line 6149 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 186: +#line 1536 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.volatil = true; } -#line 6017 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6158 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 180: -#line 1486 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 187: +#line 1540 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.restrict = true; } -#line 6026 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6167 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 181: -#line 1490 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 188: +#line 1544 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.readonly = true; } -#line 6035 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6176 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 182: -#line 1494 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 189: +#line 1548 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.writeonly = true; } -#line 6044 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6185 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 183: -#line 1498 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 190: +#line 1552 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.spvRemoved((yyvsp[0].lex).loc, "subroutine"); parseContext.globalCheck((yyvsp[0].lex).loc, "subroutine"); parseContext.unimplemented((yyvsp[0].lex).loc, "subroutine"); (yyval.interm.type).init((yyvsp[0].lex).loc); } -#line 6055 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6196 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 184: -#line 1504 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 191: +#line 1558 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.spvRemoved((yyvsp[-3].lex).loc, "subroutine"); parseContext.globalCheck((yyvsp[-3].lex).loc, "subroutine"); parseContext.unimplemented((yyvsp[-3].lex).loc, "subroutine"); (yyval.interm.type).init((yyvsp[-3].lex).loc); } -#line 6066 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6207 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 185: -#line 1515 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 192: +#line 1569 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc); (yyval.interm.type).qualifier.nonUniform = true; } -#line 6075 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6216 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 186: -#line 1522 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 193: +#line 1576 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // TODO } -#line 6083 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6224 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 187: -#line 1525 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 194: +#line 1579 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // TODO: 4.0 semantics: subroutines // 1) make sure each identifier is a type declared earlier with SUBROUTINE // 2) save all of the identifiers for future comparison with the declared function } -#line 6093 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6234 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 188: -#line 1534 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 195: +#line 1588 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type) = (yyvsp[-1].interm.type); (yyval.interm.type).qualifier.precision = parseContext.getDefaultPrecision((yyval.interm.type)); (yyval.interm.type).typeParameters = (yyvsp[0].interm.typeParameters); } -#line 6103 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6244 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 189: -#line 1539 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 196: +#line 1593 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.arrayOfArrayVersionCheck((yyvsp[0].interm).loc, (yyvsp[0].interm).arraySizes); (yyval.interm.type) = (yyvsp[-2].interm.type); @@ -6113,21 +6252,21 @@ yyreduce: (yyval.interm.type).typeParameters = 
(yyvsp[-1].interm.typeParameters); (yyval.interm.type).arraySizes = (yyvsp[0].interm).arraySizes; } -#line 6115 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6256 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 190: -#line 1549 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 197: +#line 1603 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).loc = (yyvsp[-1].lex).loc; (yyval.interm).arraySizes = new TArraySizes; (yyval.interm).arraySizes->addInnerSize(); } -#line 6125 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6266 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 191: -#line 1554 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 198: +#line 1608 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm).loc = (yyvsp[-2].lex).loc; (yyval.interm).arraySizes = new TArraySizes; @@ -6136,20 +6275,20 @@ yyreduce: parseContext.arraySizeCheck((yyvsp[-1].interm.intermTypedNode)->getLoc(), (yyvsp[-1].interm.intermTypedNode), size, "array size"); (yyval.interm).arraySizes->addInnerSize(size); } -#line 6138 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6279 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 192: -#line 1562 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 199: +#line 1616 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm) = (yyvsp[-2].interm); (yyval.interm).arraySizes->addInnerSize(); } -#line 6147 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6288 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 193: -#line 1566 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 200: +#line 1620 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm) = (yyvsp[-3].interm); @@ -6157,35 +6296,35 @@ yyreduce: parseContext.arraySizeCheck((yyvsp[-1].interm.intermTypedNode)->getLoc(), (yyvsp[-1].interm.intermTypedNode), size, "array size"); (yyval.interm).arraySizes->addInnerSize(size); } -#line 6159 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6300 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 194: -#line 1576 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 201: +#line 1630 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.typeParameters) = (yyvsp[0].interm.typeParameters); } -#line 6167 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6308 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 195: -#line 1579 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 202: +#line 1633 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.typeParameters) = 0; } -#line 6175 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6316 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 196: -#line 1585 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 203: +#line 1639 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.typeParameters) = (yyvsp[-1].interm.typeParameters); } -#line 6183 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6324 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 197: -#line 1591 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 204: +#line 1645 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.typeParameters) = new TArraySizes; @@ -6193,11 +6332,11 @@ yyreduce: parseContext.arraySizeCheck((yyvsp[0].interm.intermTypedNode)->getLoc(), (yyvsp[0].interm.intermTypedNode), size, "type 
parameter"); (yyval.interm.typeParameters)->addInnerSize(size); } -#line 6195 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6336 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 198: -#line 1598 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 205: +#line 1652 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.typeParameters) = (yyvsp[-2].interm.typeParameters); @@ -6205,300 +6344,300 @@ yyreduce: parseContext.arraySizeCheck((yyvsp[0].interm.intermTypedNode)->getLoc(), (yyvsp[0].interm.intermTypedNode), size, "type parameter"); (yyval.interm.typeParameters)->addInnerSize(size); } -#line 6207 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6348 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 199: -#line 1608 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 206: +#line 1662 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtVoid; } -#line 6216 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6357 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 200: -#line 1612 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 207: +#line 1666 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; } -#line 6225 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6366 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 201: -#line 1616 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 208: +#line 1670 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtInt; } -#line 6234 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6375 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 202: -#line 1620 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 209: +#line 1674 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint; } -#line 6244 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 203: -#line 1625 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtBool; - } -#line 6253 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 204: -#line 1629 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setVector(2); - } -#line 6263 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 205: -#line 1634 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setVector(3); - } -#line 6273 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 206: -#line 1639 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, 
parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setVector(4); - } -#line 6283 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 207: -#line 1644 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtBool; - (yyval.interm.type).setVector(2); - } -#line 6293 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 208: -#line 1649 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtBool; - (yyval.interm.type).setVector(3); - } -#line 6303 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 209: -#line 1654 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtBool; - (yyval.interm.type).setVector(4); - } -#line 6313 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6385 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 210: -#line 1659 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1679 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt; - (yyval.interm.type).setVector(2); + (yyval.interm.type).basicType = EbtBool; } -#line 6323 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6394 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 211: -#line 1664 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1683 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat; + (yyval.interm.type).setVector(2); + } +#line 6404 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 212: +#line 1688 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat; + (yyval.interm.type).setVector(3); + } +#line 6414 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 213: +#line 1693 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat; + (yyval.interm.type).setVector(4); + } +#line 6424 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 214: +#line 1698 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtBool; + (yyval.interm.type).setVector(2); + } +#line 6434 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 215: +#line 1703 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtBool; + (yyval.interm.type).setVector(3); + } +#line 6444 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 216: +#line 1708 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + 
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtBool; + (yyval.interm.type).setVector(4); + } +#line 6454 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 217: +#line 1713 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtInt; + (yyval.interm.type).setVector(2); + } +#line 6464 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 218: +#line 1718 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtInt; (yyval.interm.type).setVector(3); } -#line 6333 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6474 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 212: -#line 1669 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 219: +#line 1723 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtInt; (yyval.interm.type).setVector(4); } -#line 6343 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6484 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 213: -#line 1674 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 220: +#line 1728 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint; (yyval.interm.type).setVector(2); } -#line 6354 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6495 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 214: -#line 1680 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 221: +#line 1734 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint; (yyval.interm.type).setVector(3); } -#line 6365 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6506 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 215: -#line 1686 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 222: +#line 1740 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint; (yyval.interm.type).setVector(4); } -#line 6376 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6517 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 216: -#line 1692 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 223: +#line 1746 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(2, 2); } -#line 6386 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6527 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 217: -#line 1697 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 224: +#line 
1751 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(3, 3); } -#line 6396 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6537 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 218: -#line 1702 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 225: +#line 1756 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(4, 4); } -#line 6406 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6547 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 219: -#line 1707 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 226: +#line 1761 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(2, 2); } -#line 6416 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6557 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 220: -#line 1712 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 227: +#line 1766 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(2, 3); } -#line 6426 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6567 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 221: -#line 1717 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 228: +#line 1771 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(2, 4); } -#line 6436 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6577 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 222: -#line 1722 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 229: +#line 1776 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(3, 2); } -#line 6446 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6587 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 223: -#line 1727 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 230: +#line 1781 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(3, 3); } -#line 6456 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6597 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 224: -#line 1732 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 231: +#line 1786 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(3, 4); } -#line 6466 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6607 "MachineIndependent/glslang_tab.cpp" /* 
yacc.c:1646 */ break; - case 225: -#line 1737 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 232: +#line 1791 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(4, 2); } -#line 6476 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6617 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 226: -#line 1742 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 233: +#line 1796 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(4, 3); } -#line 6486 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6627 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 227: -#line 1747 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 234: +#line 1801 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(4, 4); } -#line 6496 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6637 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 228: -#line 1753 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 235: +#line 1807 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double"); if (! parseContext.symbolTable.atBuiltInLevel()) @@ -6506,2658 +6645,2676 @@ yyreduce: (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; } -#line 6506 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6649 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 229: -#line 1758 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 236: +#line 1814 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "float16_t", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat16; } -#line 6516 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6659 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 230: -#line 1763 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 237: +#line 1819 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; } -#line 6526 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6669 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 231: -#line 1768 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 238: +#line 1824 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; } -#line 6536 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6679 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 232: -#line 1773 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 239: +#line 1829 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtInt8; } -#line 6546 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6689 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 233: -#line 1778 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 240: +#line 1834 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint8; } -#line 6556 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6699 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 234: -#line 1783 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 241: +#line 1839 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtInt16; } -#line 6566 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6709 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 235: -#line 1788 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 242: +#line 1844 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint16; } -#line 6576 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6719 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 236: -#line 1793 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 243: +#line 1849 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtInt; } -#line 6586 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6729 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 237: -#line 1798 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 244: +#line 1854 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint; } -#line 6596 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6739 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 238: -#line 1803 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 245: +#line 1859 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer", 
parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtInt64; } -#line 6606 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6749 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 239: -#line 1808 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 246: +#line 1864 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint64; } -#line 6616 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 240: -#line 1813 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double vector"); - if (! parseContext.symbolTable.atBuiltInLevel()) - parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setVector(2); - } -#line 6627 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 241: -#line 1819 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double vector"); - if (! parseContext.symbolTable.atBuiltInLevel()) - parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setVector(3); - } -#line 6638 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 242: -#line 1825 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double vector"); - if (! 
parseContext.symbolTable.atBuiltInLevel()) - parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setVector(4); - } -#line 6649 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 243: -#line 1831 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setVector(2); - } -#line 6660 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 244: -#line 1837 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setVector(3); - } -#line 6671 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 245: -#line 1843 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setVector(4); - } -#line 6682 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 246: -#line 1849 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setVector(2); - } -#line 6693 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 247: -#line 1855 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setVector(3); - } -#line 6704 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 248: -#line 1861 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setVector(4); - } -#line 6715 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 249: -#line 1867 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setVector(2); - } -#line 6726 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 250: -#line 1873 
"MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setVector(3); - } -#line 6737 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 251: -#line 1879 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setVector(4); - } -#line 6748 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 252: -#line 1885 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt8; - (yyval.interm.type).setVector(2); - } #line 6759 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 253: -#line 1891 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 247: +#line 1869 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); + parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double vector"); + if (! parseContext.symbolTable.atBuiltInLevel()) + parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt8; + (yyval.interm.type).basicType = EbtDouble; + (yyval.interm.type).setVector(2); + } +#line 6772 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 248: +#line 1877 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double vector"); + if (! parseContext.symbolTable.atBuiltInLevel()) + parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector"); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setVector(3); } -#line 6770 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6785 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 249: +#line 1885 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double vector"); + if (! 
parseContext.symbolTable.atBuiltInLevel()) + parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector"); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtDouble; + (yyval.interm.type).setVector(4); + } +#line 6798 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 250: +#line 1893 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setVector(2); + } +#line 6809 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 251: +#line 1899 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setVector(3); + } +#line 6820 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 252: +#line 1905 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setVector(4); + } +#line 6831 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 253: +#line 1911 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat; + (yyval.interm.type).setVector(2); + } +#line 6842 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 254: -#line 1897 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1917 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat; + (yyval.interm.type).setVector(3); + } +#line 6853 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 255: +#line 1923 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat; + (yyval.interm.type).setVector(4); + } +#line 6864 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 256: +#line 1929 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtDouble; + (yyval.interm.type).setVector(2); + } +#line 6875 "MachineIndependent/glslang_tab.cpp" /* 
yacc.c:1646 */ + break; + + case 257: +#line 1935 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtDouble; + (yyval.interm.type).setVector(3); + } +#line 6886 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 258: +#line 1941 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtDouble; + (yyval.interm.type).setVector(4); + } +#line 6897 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 259: +#line 1947 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtInt8; + (yyval.interm.type).setVector(2); + } +#line 6908 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 260: +#line 1953 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtInt8; + (yyval.interm.type).setVector(3); + } +#line 6919 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 261: +#line 1959 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtInt8; (yyval.interm.type).setVector(4); } -#line 6781 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 255: -#line 1903 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt16; - (yyval.interm.type).setVector(2); - } -#line 6792 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 256: -#line 1909 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt16; - (yyval.interm.type).setVector(3); - } -#line 6803 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 257: -#line 1915 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = 
EbtInt16; - (yyval.interm.type).setVector(4); - } -#line 6814 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 258: -#line 1921 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt; - (yyval.interm.type).setVector(2); - } -#line 6825 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 259: -#line 1927 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt; - (yyval.interm.type).setVector(3); - } -#line 6836 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 260: -#line 1933 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt; - (yyval.interm.type).setVector(4); - } -#line 6847 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 261: -#line 1939 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt64; - (yyval.interm.type).setVector(2); - } -#line 6858 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6930 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 262: -#line 1945 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1965 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); + parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt64; - (yyval.interm.type).setVector(3); + (yyval.interm.type).basicType = EbtInt16; + (yyval.interm.type).setVector(2); } -#line 6869 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 6941 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 263: -#line 1951 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 1971 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtInt16; + (yyval.interm.type).setVector(3); + } +#line 6952 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 264: +#line 1977 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); + 
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtInt16; + (yyval.interm.type).setVector(4); + } +#line 6963 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 265: +#line 1983 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtInt; + (yyval.interm.type).setVector(2); + } +#line 6974 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 266: +#line 1989 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtInt; + (yyval.interm.type).setVector(3); + } +#line 6985 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 267: +#line 1995 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtInt; + (yyval.interm.type).setVector(4); + } +#line 6996 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 268: +#line 2001 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtInt64; + (yyval.interm.type).setVector(2); + } +#line 7007 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 269: +#line 2007 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtInt64; + (yyval.interm.type).setVector(3); + } +#line 7018 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 270: +#line 2013 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtInt64; (yyval.interm.type).setVector(4); } -#line 6880 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 264: -#line 1957 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint8; - (yyval.interm.type).setVector(2); - } -#line 6891 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 265: -#line 1963 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned 
integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint8; - (yyval.interm.type).setVector(3); - } -#line 6902 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 266: -#line 1969 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint8; - (yyval.interm.type).setVector(4); - } -#line 6913 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 267: -#line 1975 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint16; - (yyval.interm.type).setVector(2); - } -#line 6924 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 268: -#line 1981 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint16; - (yyval.interm.type).setVector(3); - } -#line 6935 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 269: -#line 1987 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint16; - (yyval.interm.type).setVector(4); - } -#line 6946 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 270: -#line 1993 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint; - (yyval.interm.type).setVector(2); - } -#line 6957 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7029 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 271: -#line 1999 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2019 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtUint8; + (yyval.interm.type).setVector(2); + } +#line 7040 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 272: +#line 2025 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = 
EbtUint8; + (yyval.interm.type).setVector(3); + } +#line 7051 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 273: +#line 2031 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtUint8; + (yyval.interm.type).setVector(4); + } +#line 7062 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 274: +#line 2037 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtUint16; + (yyval.interm.type).setVector(2); + } +#line 7073 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 275: +#line 2043 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtUint16; + (yyval.interm.type).setVector(3); + } +#line 7084 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 276: +#line 2049 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtUint16; + (yyval.interm.type).setVector(4); + } +#line 7095 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 277: +#line 2055 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtUint; + (yyval.interm.type).setVector(2); + } +#line 7106 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 278: +#line 2061 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint; (yyval.interm.type).setVector(3); } -#line 6968 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7117 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 272: -#line 2005 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 279: +#line 2067 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint; (yyval.interm.type).setVector(4); } -#line 6979 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7128 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 
273: -#line 2011 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 280: +#line 2073 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint64; (yyval.interm.type).setVector(2); } -#line 6990 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7139 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 274: -#line 2017 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 281: +#line 2079 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint64; (yyval.interm.type).setVector(3); } -#line 7001 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7150 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 275: -#line 2023 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 282: +#line 2085 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint64; (yyval.interm.type).setVector(4); } -#line 7012 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 276: -#line 2029 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); - if (! parseContext.symbolTable.atBuiltInLevel()) - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(2, 2); - } -#line 7023 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 277: -#line 2035 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); - if (! parseContext.symbolTable.atBuiltInLevel()) - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(3, 3); - } -#line 7034 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 278: -#line 2041 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); - if (! parseContext.symbolTable.atBuiltInLevel()) - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(4, 4); - } -#line 7045 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 279: -#line 2047 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); - if (! 
parseContext.symbolTable.atBuiltInLevel()) - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(2, 2); - } -#line 7056 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 280: -#line 2053 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); - if (! parseContext.symbolTable.atBuiltInLevel()) - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(2, 3); - } -#line 7067 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 281: -#line 2059 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); - if (! parseContext.symbolTable.atBuiltInLevel()) - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(2, 4); - } -#line 7078 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 282: -#line 2065 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); - if (! parseContext.symbolTable.atBuiltInLevel()) - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(3, 2); - } -#line 7089 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7161 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 283: -#line 2071 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2091 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); if (! parseContext.symbolTable.atBuiltInLevel()) parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(3, 3); + (yyval.interm.type).setMatrix(2, 2); } -#line 7100 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7174 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 284: -#line 2077 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2099 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); if (! 
parseContext.symbolTable.atBuiltInLevel()) parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(3, 4); + (yyval.interm.type).setMatrix(3, 3); } -#line 7111 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7187 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 285: -#line 2083 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2107 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); if (! parseContext.symbolTable.atBuiltInLevel()) parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(4, 2); + (yyval.interm.type).setMatrix(4, 4); } -#line 7122 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7200 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 286: -#line 2089 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2115 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); if (! parseContext.symbolTable.atBuiltInLevel()) parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(4, 3); + (yyval.interm.type).setMatrix(2, 2); } -#line 7133 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7213 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 287: -#line 2095 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2123 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); if (! parseContext.symbolTable.atBuiltInLevel()) parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(4, 4); + (yyval.interm.type).setMatrix(2, 3); } -#line 7144 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7226 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 288: -#line 2101 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2131 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); + parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); + if (! 
parseContext.symbolTable.atBuiltInLevel()) + parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(2, 2); + (yyval.interm.type).basicType = EbtDouble; + (yyval.interm.type).setMatrix(2, 4); } -#line 7155 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7239 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 289: -#line 2107 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2139 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); + parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); + if (! parseContext.symbolTable.atBuiltInLevel()) + parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(3, 3); + (yyval.interm.type).basicType = EbtDouble; + (yyval.interm.type).setMatrix(3, 2); } -#line 7166 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7252 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 290: -#line 2113 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2147 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); + parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); + if (! parseContext.symbolTable.atBuiltInLevel()) + parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(4, 4); - } -#line 7177 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 291: -#line 2119 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(2, 2); - } -#line 7188 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 292: -#line 2125 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(2, 3); - } -#line 7199 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 293: -#line 2131 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(2, 4); - } -#line 7210 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 294: -#line 2137 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - 
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(3, 2); - } -#line 7221 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 295: -#line 2143 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(3, 3); } -#line 7232 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 296: -#line 2149 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(3, 4); - } -#line 7243 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 297: -#line 2155 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(4, 2); - } -#line 7254 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 298: -#line 2161 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(4, 3); - } #line 7265 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 299: -#line 2167 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 291: +#line 2155 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); + if (! parseContext.symbolTable.atBuiltInLevel()) + parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtDouble; + (yyval.interm.type).setMatrix(3, 4); + } +#line 7278 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 292: +#line 2163 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); + if (! 
parseContext.symbolTable.atBuiltInLevel()) + parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtDouble; + (yyval.interm.type).setMatrix(4, 2); + } +#line 7291 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 293: +#line 2171 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); + if (! parseContext.symbolTable.atBuiltInLevel()) + parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtDouble; + (yyval.interm.type).setMatrix(4, 3); + } +#line 7304 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 294: +#line 2179 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix"); + if (! parseContext.symbolTable.atBuiltInLevel()) + parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtDouble; + (yyval.interm.type).setMatrix(4, 4); + } +#line 7317 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 295: +#line 2187 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setMatrix(2, 2); + } +#line 7328 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 296: +#line 2193 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setMatrix(3, 3); + } +#line 7339 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 297: +#line 2199 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat16; (yyval.interm.type).setMatrix(4, 4); } -#line 7276 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7350 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 298: +#line 2205 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setMatrix(2, 2); + } +#line 7361 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 299: +#line 2211 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, 
parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setMatrix(2, 3); + } +#line 7372 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 300: -#line 2173 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2217 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); + parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(2, 2); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setMatrix(2, 4); } -#line 7287 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7383 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 301: -#line 2179 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2223 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); + parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(3, 3); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setMatrix(3, 2); } -#line 7298 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7394 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 302: -#line 2185 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2229 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); + parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(4, 4); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setMatrix(3, 3); } -#line 7309 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7405 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 303: -#line 2191 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2235 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); + parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(2, 2); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setMatrix(3, 4); } -#line 7320 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7416 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 304: -#line 2197 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2241 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { - parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); + 
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(2, 3); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setMatrix(4, 2); } -#line 7331 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7427 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 305: -#line 2203 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 2247 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setMatrix(4, 3); + } +#line 7438 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 306: +#line 2253 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat16; + (yyval.interm.type).setMatrix(4, 4); + } +#line 7449 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 307: +#line 2259 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat; + (yyval.interm.type).setMatrix(2, 2); + } +#line 7460 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 308: +#line 2265 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat; + (yyval.interm.type).setMatrix(3, 3); + } +#line 7471 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 309: +#line 2271 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat; + (yyval.interm.type).setMatrix(4, 4); + } +#line 7482 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 310: +#line 2277 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat; + (yyval.interm.type).setMatrix(2, 2); + } +#line 7493 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 311: +#line 2283 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); + (yyval.interm.type).init((yyvsp[0].lex).loc, 
parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtFloat; + (yyval.interm.type).setMatrix(2, 3); + } +#line 7504 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 312: +#line 2289 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(2, 4); } -#line 7342 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7515 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 306: -#line 2209 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 313: +#line 2295 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(3, 2); } -#line 7353 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7526 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 307: -#line 2215 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 314: +#line 2301 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(3, 3); } -#line 7364 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7537 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 308: -#line 2221 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 315: +#line 2307 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(3, 4); } -#line 7375 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7548 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 309: -#line 2227 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 316: +#line 2313 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(4, 2); } -#line 7386 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7559 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 310: -#line 2233 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 317: +#line 2319 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(4, 3); } -#line 7397 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7570 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 311: -#line 2239 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 318: +#line 2325 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).setMatrix(4, 4); } -#line 7408 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7581 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 312: -#line 2245 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 319: +#line 2331 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(2, 2); } -#line 7419 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7592 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 313: -#line 2251 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 320: +#line 2337 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(3, 3); } -#line 7430 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7603 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 314: -#line 2257 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 321: +#line 2343 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(4, 4); } -#line 7441 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7614 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 315: -#line 2263 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 322: +#line 2349 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(2, 2); } -#line 7452 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7625 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 316: -#line 2269 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 323: +#line 2355 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(2, 3); } -#line 7463 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7636 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 317: -#line 2275 
"MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 324: +#line 2361 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(2, 4); } -#line 7474 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7647 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 318: -#line 2281 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 325: +#line 2367 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(3, 2); } -#line 7485 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7658 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 319: -#line 2287 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 326: +#line 2373 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(3, 3); } -#line 7496 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7669 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 320: -#line 2293 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 327: +#line 2379 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(3, 4); } -#line 7507 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7680 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 321: -#line 2299 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 328: +#line 2385 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(4, 2); } -#line 7518 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7691 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 322: -#line 2305 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 329: +#line 2391 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(4, 3); } -#line 7529 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7702 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 323: -#line 2311 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 330: +#line 2397 
"MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtDouble; (yyval.interm.type).setMatrix(4, 4); } -#line 7540 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7713 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 324: -#line 2317 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 331: +#line 2403 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtAccStructNV; + (yyval.interm.type).basicType = EbtAccStruct; } -#line 7549 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7722 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 325: -#line 2321 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 332: +#line 2407 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtAccStruct; + } +#line 7731 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 333: +#line 2411 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); + (yyval.interm.type).basicType = EbtRayQuery; + } +#line 7740 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 334: +#line 2415 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.vulkanRemoved((yyvsp[0].lex).loc, "atomic counter types"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtAtomicUint; } -#line 7559 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7750 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 326: -#line 2326 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 335: +#line 2420 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd1D); } -#line 7569 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7760 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 327: -#line 2332 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 336: +#line 2426 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd2D); } -#line 7579 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7770 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 328: -#line 2337 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 337: +#line 2431 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd3D); } -#line 7589 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7780 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 329: -#line 2342 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 338: 
+#line 2436 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, EsdCube); } -#line 7599 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7790 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 330: -#line 2347 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 339: +#line 2441 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd2D, false, true); } -#line 7609 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7800 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 331: -#line 2352 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 340: +#line 2446 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, EsdCube, false, true); } -#line 7619 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7810 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 332: -#line 2357 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 341: +#line 2451 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd2D, true); } -#line 7629 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7820 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 333: -#line 2362 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 342: +#line 2456 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd2D, true, true); } -#line 7639 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7830 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 334: -#line 2368 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 343: +#line 2462 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd1D, false, true); } -#line 7649 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7840 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 335: -#line 2373 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 344: +#line 2467 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd1D, true); } -#line 7659 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7850 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 336: -#line 2378 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 345: +#line 2472 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); 
(yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd1D, true, true); } -#line 7669 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7860 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 337: -#line 2383 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 346: +#line 2477 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, EsdCube, true); } -#line 7679 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7870 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 338: -#line 2388 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 347: +#line 2482 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, EsdCube, true, true); } -#line 7689 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7880 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 339: -#line 2393 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 348: +#line 2487 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, Esd1D); } -#line 7700 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7891 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 340: -#line 2399 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 349: +#line 2493 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, Esd2D); } -#line 7711 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7902 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 341: -#line 2405 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 350: +#line 2499 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, Esd3D); } -#line 7722 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7913 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 342: -#line 2411 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 351: +#line 2505 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, EsdCube); } -#line 7733 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7924 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 343: -#line 2417 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 352: +#line 2511 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, Esd1D, false, true); } -#line 7744 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7935 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 344: -#line 2423 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 353: +#line 2517 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, Esd2D, false, true); } -#line 7755 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7946 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 345: -#line 2429 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 354: +#line 2523 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, EsdCube, false, true); } -#line 7766 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7957 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 346: -#line 2435 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 355: +#line 2529 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, Esd1D, true); } -#line 7777 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7968 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 347: -#line 2441 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 356: +#line 2535 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, Esd2D, true); } -#line 7788 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7979 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 348: -#line 2447 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 357: +#line 2541 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, Esd1D, true, true); } -#line 
7799 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 7990 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 349: -#line 2453 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 358: +#line 2547 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, Esd2D, true, true); } -#line 7810 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8001 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 350: -#line 2459 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 359: +#line 2553 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, EsdCube, true); } -#line 7821 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8012 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 351: -#line 2465 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 360: +#line 2559 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, EsdCube, true, true); } -#line 7832 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8023 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 352: -#line 2471 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 361: +#line 2565 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtInt, Esd1D); } -#line 7842 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8033 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 353: -#line 2477 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 362: +#line 2571 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtInt, Esd2D); } -#line 7852 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8043 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 354: -#line 2482 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 363: +#line 2576 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtInt, Esd3D); } -#line 7862 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8053 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 355: -#line 2487 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 364: +#line 2581 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, 
parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtInt, EsdCube); } -#line 7872 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8063 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 356: -#line 2492 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 365: +#line 2586 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtInt, Esd2D, true); } -#line 7882 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8073 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 357: -#line 2497 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 366: +#line 2591 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtUint, Esd2D); } -#line 7892 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8083 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 358: -#line 2502 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 367: +#line 2596 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtUint, Esd3D); } -#line 7902 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8093 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 359: -#line 2507 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 368: +#line 2601 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtUint, EsdCube); } -#line 7912 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8103 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 360: -#line 2513 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 369: +#line 2607 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtInt, Esd1D, true); } -#line 7922 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8113 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 361: -#line 2518 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 370: +#line 2612 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtInt, EsdCube, true); } -#line 7932 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8123 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 362: -#line 2523 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 371: +#line 2617 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtUint, Esd1D); } -#line 7942 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8133 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 363: -#line 2528 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 372: +#line 2622 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtUint, Esd1D, true); } -#line 7952 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8143 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 364: -#line 2533 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 373: +#line 2627 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtUint, EsdCube, true); } -#line 7962 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8153 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 365: -#line 2538 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 374: +#line 2632 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat, EsdCube, true); } -#line 7972 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8163 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 366: -#line 2543 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 375: +#line 2637 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtInt, EsdCube, true); } -#line 7982 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8173 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 367: -#line 2548 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 376: +#line 2642 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtUint, EsdCube, true); } -#line 7992 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8183 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 368: -#line 2554 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 377: +#line 2648 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtUint, Esd2D, true); } -#line 8002 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8193 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 369: -#line 2559 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 378: +#line 2653 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D); } -#line 8012 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8203 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 370: -#line 2564 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 379: +#line 2658 
"MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat, Esd3D); } -#line 8022 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8213 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 371: -#line 2569 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 380: +#line 2663 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, true); } -#line 8032 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8223 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 372: -#line 2574 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 381: +#line 2668 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat, EsdCube); } -#line 8042 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8233 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 373: -#line 2579 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 382: +#line 2673 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtInt, Esd2D); } -#line 8052 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8243 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 374: -#line 2584 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 383: +#line 2678 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtInt, Esd3D); } -#line 8062 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8253 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 375: -#line 2589 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 384: +#line 2683 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtInt, EsdCube); } -#line 8072 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8263 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 376: -#line 2594 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 385: +#line 2688 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, true); } -#line 8082 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8273 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 377: -#line 2599 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 386: +#line 2693 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; 
(yyval.interm.type).sampler.setTexture(EbtUint, Esd2D); } -#line 8092 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8283 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 378: -#line 2604 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 387: +#line 2698 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtUint, Esd3D); } -#line 8102 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8293 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 379: -#line 2609 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 388: +#line 2703 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtUint, EsdCube); } -#line 8112 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8303 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 380: -#line 2614 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 389: +#line 2708 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, true); } -#line 8122 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8313 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 381: -#line 2619 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 390: +#line 2713 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setPureSampler(false); } -#line 8132 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8323 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 382: -#line 2624 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 391: +#line 2718 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setPureSampler(true); } -#line 8142 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8333 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 383: -#line 2630 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 392: +#line 2724 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, EsdRect); } -#line 8152 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8343 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 384: -#line 2635 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 393: +#line 2729 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, EsdRect, false, true); } -#line 8162 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8353 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; 
- case 385: -#line 2640 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 394: +#line 2734 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, EsdRect); } -#line 8173 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8364 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 386: -#line 2646 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 395: +#line 2740 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, EsdRect, false, true); } -#line 8184 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8375 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 387: -#line 2652 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 396: +#line 2746 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtInt, EsdRect); } -#line 8194 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8385 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 388: -#line 2657 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 397: +#line 2751 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtUint, EsdRect); } -#line 8204 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8395 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 389: -#line 2662 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 398: +#line 2756 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, EsdBuffer); } -#line 8214 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8405 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 390: -#line 2667 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 399: +#line 2761 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, EsdBuffer); } -#line 8225 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8416 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 391: -#line 2673 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 400: +#line 2767 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtInt, 
EsdBuffer); } -#line 8235 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8426 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 392: -#line 2678 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 401: +#line 2772 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtUint, EsdBuffer); } -#line 8245 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8436 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 393: -#line 2683 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 402: +#line 2777 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd2D, false, false, true); } -#line 8255 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8446 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 394: -#line 2688 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 403: +#line 2782 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, Esd2D, false, false, true); } -#line 8266 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8457 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 395: -#line 2694 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 404: +#line 2788 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtInt, Esd2D, false, false, true); } -#line 8276 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8467 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 396: -#line 2699 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 405: +#line 2793 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtUint, Esd2D, false, false, true); } -#line 8286 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8477 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 397: -#line 2704 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 406: +#line 2798 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd2D, true, false, true); } -#line 8296 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8487 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 398: -#line 2709 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 407: +#line 2803 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, 
parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat16, Esd2D, true, false, true); } -#line 8307 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8498 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 399: -#line 2715 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 408: +#line 2809 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtInt, Esd2D, true, false, true); } -#line 8317 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8508 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 400: -#line 2720 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 409: +#line 2814 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtUint, Esd2D, true, false, true); } -#line 8327 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8518 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 401: -#line 2725 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 410: +#line 2819 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat, Esd1D); } -#line 8337 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8528 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 402: -#line 2730 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 411: +#line 2824 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd1D); } -#line 8348 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8539 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 403: -#line 2736 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 412: +#line 2830 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D); } -#line 8359 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8550 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 404: -#line 2742 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 413: +#line 2836 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd3D); } -#line 8370 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8561 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ 
break; - case 405: -#line 2748 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 414: +#line 2842 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat16, EsdCube); } -#line 8381 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8572 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 406: -#line 2754 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 415: +#line 2848 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat, Esd1D, true); } -#line 8391 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8582 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 407: -#line 2759 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 416: +#line 2853 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd1D, true); } -#line 8402 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8593 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 408: -#line 2765 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 417: +#line 2859 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D, true); } -#line 8413 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8604 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 409: -#line 2771 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 418: +#line 2865 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat16, EsdCube, true); } -#line 8424 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8615 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 410: -#line 2777 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 419: +#line 2871 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtInt, Esd1D); } -#line 8434 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8625 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 411: -#line 2782 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 420: +#line 2876 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { 
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtInt, Esd1D, true); } -#line 8444 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8635 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 412: -#line 2787 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 421: +#line 2881 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtUint, Esd1D); } -#line 8454 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8645 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 413: -#line 2792 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 422: +#line 2886 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtUint, Esd1D, true); } -#line 8464 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8655 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 414: -#line 2797 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 423: +#line 2891 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat, EsdRect); } -#line 8474 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8665 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 415: -#line 2802 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 424: +#line 2896 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat16, EsdRect); } -#line 8485 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8676 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 416: -#line 2808 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 425: +#line 2902 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtInt, EsdRect); } -#line 8495 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8686 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 417: -#line 2813 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 426: +#line 2907 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtUint, EsdRect); } -#line 8505 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8696 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 418: -#line 2818 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 427: +#line 2912 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, 
parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat, EsdBuffer); } -#line 8515 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8706 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 419: -#line 2823 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 428: +#line 2917 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat16, EsdBuffer); } -#line 8526 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8717 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 420: -#line 2829 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 429: +#line 2923 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtInt, EsdBuffer); } -#line 8536 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8727 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 421: -#line 2834 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 430: +#line 2928 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtUint, EsdBuffer); } -#line 8546 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8737 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 422: -#line 2839 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 431: +#line 2933 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, false, false, true); } -#line 8556 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8747 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 423: -#line 2844 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 432: +#line 2938 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D, false, false, true); } -#line 8567 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8758 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 424: -#line 2850 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 433: +#line 2944 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, false, false, true); } -#line 8577 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8768 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 425: -#line 2855 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + 
case 434: +#line 2949 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, false, false, true); } -#line 8587 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8778 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 426: -#line 2860 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 435: +#line 2954 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, true, false, true); } -#line 8597 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8788 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 427: -#line 2865 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 436: +#line 2959 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D, true, false, true); } -#line 8608 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8799 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 428: -#line 2871 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 437: +#line 2965 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, true, false, true); } -#line 8618 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8809 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 429: -#line 2876 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 438: +#line 2970 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, true, false, true); } -#line 8628 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8819 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 430: -#line 2881 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 439: +#line 2975 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat, Esd1D); } -#line 8638 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8829 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 431: -#line 2886 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 440: +#line 2980 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat16, Esd1D); } -#line 8649 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8840 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 432: -#line 2892 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 441: +#line 2986 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtInt, Esd1D); } -#line 8659 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8850 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 433: -#line 2897 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 442: +#line 2991 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtUint, Esd1D); } -#line 8669 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8860 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 434: -#line 2902 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 443: +#line 2996 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat, Esd2D); } -#line 8679 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8870 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 435: -#line 2907 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 444: +#line 3001 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat16, Esd2D); } -#line 8690 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8881 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 436: -#line 2913 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 445: +#line 3007 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtInt, Esd2D); } -#line 8700 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8891 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 437: -#line 2918 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 446: +#line 3012 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtUint, Esd2D); } -#line 8710 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8901 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 438: -#line 2923 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 447: +#line 3017 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat, Esd3D); } -#line 8720 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8911 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 439: -#line 2928 
"MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 448: +#line 3022 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat16, Esd3D); } -#line 8731 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8922 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 440: -#line 2934 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 449: +#line 3028 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtInt, Esd3D); } -#line 8741 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8932 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 441: -#line 2939 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 450: +#line 3033 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtUint, Esd3D); } -#line 8751 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8942 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 442: -#line 2944 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 451: +#line 3038 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat, EsdRect); } -#line 8761 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8952 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 443: -#line 2949 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 452: +#line 3043 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat16, EsdRect); } -#line 8772 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8963 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 444: -#line 2955 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 453: +#line 3049 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtInt, EsdRect); } -#line 8782 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8973 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 445: -#line 2960 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 454: +#line 3054 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtUint, EsdRect); } -#line 8792 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8983 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ 
break; - case 446: -#line 2965 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 455: +#line 3059 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat, EsdCube); } -#line 8802 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 8993 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 447: -#line 2970 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 456: +#line 3064 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat16, EsdCube); } -#line 8813 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9004 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 448: -#line 2976 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 457: +#line 3070 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtInt, EsdCube); } -#line 8823 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9014 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 449: -#line 2981 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 458: +#line 3075 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtUint, EsdCube); } -#line 8833 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9024 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 450: -#line 2986 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 459: +#line 3080 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat, EsdBuffer); } -#line 8843 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9034 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 451: -#line 2991 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 460: +#line 3085 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat16, EsdBuffer); } -#line 8854 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9045 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 452: -#line 2997 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 461: +#line 3091 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtInt, EsdBuffer); } -#line 8864 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9055 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 453: -#line 3002 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 462: +#line 3096 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtUint, EsdBuffer); } -#line 8874 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9065 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 454: -#line 3007 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 463: +#line 3101 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat, Esd1D, true); } -#line 8884 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9075 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 455: -#line 3012 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 464: +#line 3106 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat16, Esd1D, true); } -#line 8895 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9086 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 456: -#line 3018 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 465: +#line 3112 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtInt, Esd1D, true); } -#line 8905 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9096 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 457: -#line 3023 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 466: +#line 3117 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtUint, Esd1D, true); } -#line 8915 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9106 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 458: -#line 3028 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 467: +#line 3122 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat, Esd2D, true); } -#line 8925 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9116 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 459: -#line 3033 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 468: +#line 3127 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat16, Esd2D, true); } -#line 8936 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9127 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 460: -#line 3039 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 469: +#line 3133 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtInt, Esd2D, true); } -#line 8946 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9137 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 461: -#line 3044 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 470: +#line 3138 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtUint, Esd2D, true); } -#line 8956 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9147 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 462: -#line 3049 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 471: +#line 3143 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat, EsdCube, true); } -#line 8966 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9157 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 463: -#line 3054 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 472: +#line 3148 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat16, EsdCube, true); } -#line 8977 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9168 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 464: -#line 3060 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 473: +#line 3154 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtInt, EsdCube, true); } -#line 8987 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9178 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 465: -#line 3065 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 474: +#line 3159 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtUint, EsdCube, true); } -#line 8997 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9188 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 466: -#line 3070 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 475: +#line 3164 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat, Esd2D, false, false, true); } -#line 9007 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9198 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 467: -#line 3075 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 476: +#line 3169 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat16, Esd2D, false, false, true); } -#line 9018 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9209 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 468: -#line 3081 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 477: +#line 3175 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtInt, Esd2D, false, false, true); } -#line 9028 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9219 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 469: -#line 3086 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 478: +#line 3180 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtUint, Esd2D, false, false, true); } -#line 9038 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9229 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 470: -#line 3091 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 479: +#line 3185 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat, Esd2D, true, false, true); } -#line 9048 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9239 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 471: -#line 3096 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 480: +#line 3190 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtFloat16, Esd2D, true, false, true); } -#line 9059 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9250 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 472: -#line 3102 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 481: +#line 3196 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtInt, Esd2D, true, false, true); } -#line 9069 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9260 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 473: -#line 3107 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 482: +#line 3201 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type).init((yyvsp[0].lex).loc, 
parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setImage(EbtUint, Esd2D, true, false, true); } -#line 9079 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9270 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 474: -#line 3112 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 483: +#line 3206 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // GL_OES_EGL_image_external (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd2D); (yyval.interm.type).sampler.external = true; } -#line 9090 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9281 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 475: -#line 3118 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 484: +#line 3212 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // GL_EXT_YUV_target (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.set(EbtFloat, Esd2D); (yyval.interm.type).sampler.yuv = true; } -#line 9101 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9292 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 476: -#line 3124 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 485: +#line 3218 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setSubpass(EbtFloat); } -#line 9112 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9303 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 477: -#line 3130 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 486: +#line 3224 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setSubpass(EbtFloat, true); } -#line 9123 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9314 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 478: -#line 3136 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 487: +#line 3230 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float subpass input", parseContext.symbolTable.atBuiltInLevel()); parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); @@ -9165,11 +9322,11 @@ yyreduce: (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setSubpass(EbtFloat16); } -#line 9135 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9326 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 479: -#line 3143 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 488: +#line 3237 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float subpass input", parseContext.symbolTable.atBuiltInLevel()); parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); @@ -9177,98 +9334,98 @@ yyreduce: (yyval.interm.type).basicType = EbtSampler; 
(yyval.interm.type).sampler.setSubpass(EbtFloat16, true); } -#line 9147 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9338 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 480: -#line 3150 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 489: +#line 3244 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setSubpass(EbtInt); } -#line 9158 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9349 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 481: -#line 3156 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 490: +#line 3250 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setSubpass(EbtInt, true); } -#line 9169 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9360 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 482: -#line 3162 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 491: +#line 3256 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setSubpass(EbtUint); } -#line 9180 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9371 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 483: -#line 3168 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 492: +#line 3262 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtSampler; (yyval.interm.type).sampler.setSubpass(EbtUint, true); } -#line 9191 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9382 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 484: -#line 3174 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 493: +#line 3268 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.fcoopmatCheck((yyvsp[0].lex).loc, "fcoopmatNV", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtFloat; (yyval.interm.type).coopmat = true; } -#line 9202 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9393 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 485: -#line 3180 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 494: +#line 3274 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.intcoopmatCheck((yyvsp[0].lex).loc, "icoopmatNV", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtInt; (yyval.interm.type).coopmat = true; } -#line 9213 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9404 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 
486: -#line 3186 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 495: +#line 3280 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.intcoopmatCheck((yyvsp[0].lex).loc, "ucoopmatNV", parseContext.symbolTable.atBuiltInLevel()); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); (yyval.interm.type).basicType = EbtUint; (yyval.interm.type).coopmat = true; } -#line 9224 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9415 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 487: -#line 3193 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 496: +#line 3287 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.type) = (yyvsp[0].interm.type); (yyval.interm.type).qualifier.storage = parseContext.symbolTable.atGlobalLevel() ? EvqGlobal : EvqTemporary; parseContext.structTypeCheck((yyval.interm.type).loc, (yyval.interm.type)); } -#line 9234 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9425 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 488: -#line 3198 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 497: +#line 3292 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // // This is for user defined type names. The lexical phase looked up the @@ -9282,47 +9439,47 @@ yyreduce: } else parseContext.error((yyvsp[0].lex).loc, "expected type name", (yyvsp[0].lex).string->c_str(), ""); } -#line 9252 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9443 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 489: -#line 3214 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 498: +#line 3308 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "highp precision qualifier"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); parseContext.handlePrecisionQualifier((yyvsp[0].lex).loc, (yyval.interm.type).qualifier, EpqHigh); } -#line 9262 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9453 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 490: -#line 3219 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 499: +#line 3313 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "mediump precision qualifier"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); parseContext.handlePrecisionQualifier((yyvsp[0].lex).loc, (yyval.interm.type).qualifier, EpqMedium); } -#line 9272 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9463 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 491: -#line 3224 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 500: +#line 3318 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "lowp precision qualifier"); (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); parseContext.handlePrecisionQualifier((yyvsp[0].lex).loc, (yyval.interm.type).qualifier, EpqLow); } -#line 9282 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9473 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 492: -#line 3232 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 501: +#line 3326 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.nestedStructCheck((yyvsp[-2].lex).loc); } -#line 9288 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9479 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 493: -#line 3232 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 502: +#line 3326 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { TType* structure = new TType((yyvsp[-1].interm.typeList), *(yyvsp[-4].lex).string); parseContext.structArrayCheck((yyvsp[-4].lex).loc, *structure); @@ -9334,17 +9491,17 @@ yyreduce: (yyval.interm.type).userDef = structure; --parseContext.structNestingLevel; } -#line 9304 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9495 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 494: -#line 3243 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 503: +#line 3337 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.nestedStructCheck((yyvsp[-1].lex).loc); } -#line 9310 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9501 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 495: -#line 3243 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 504: +#line 3337 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { TType* structure = new TType((yyvsp[-1].interm.typeList), TString("")); (yyval.interm.type).init((yyvsp[-4].lex).loc); @@ -9352,19 +9509,19 @@ yyreduce: (yyval.interm.type).userDef = structure; --parseContext.structNestingLevel; } -#line 9322 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9513 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 496: -#line 3253 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 505: +#line 3347 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.typeList) = (yyvsp[0].interm.typeList); } -#line 9330 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9521 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 497: -#line 3256 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 506: +#line 3350 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.typeList) = (yyvsp[-1].interm.typeList); for (unsigned int i = 0; i < (yyvsp[0].interm.typeList)->size(); ++i) { @@ -9375,11 +9532,11 @@ yyreduce: (yyval.interm.typeList)->push_back((*(yyvsp[0].interm.typeList))[i]); } } -#line 9345 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9536 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 498: -#line 3269 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 507: +#line 3363 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if ((yyvsp[-2].interm.type).arraySizes) { parseContext.profileRequires((yyvsp[-2].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); @@ -9402,11 +9559,11 @@ yyreduce: (*(yyval.interm.typeList))[i].type->shallowCopy(type); } } -#line 9372 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9563 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 499: -#line 3291 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 508: +#line 3385 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if ((yyvsp[-2].interm.type).arraySizes) { parseContext.profileRequires((yyvsp[-2].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); @@ -9431,38 +9588,38 @@ yyreduce: (*(yyval.interm.typeList))[i].type->shallowCopy(type); } } -#line 9401 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9592 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 500: -#line 3318 "MachineIndependent/glslang.y" /* 
yacc.c:1646 */ + case 509: +#line 3412 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.typeList) = new TTypeList; (yyval.interm.typeList)->push_back((yyvsp[0].interm.typeLine)); } -#line 9410 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9601 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 501: -#line 3322 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 510: +#line 3416 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.typeList)->push_back((yyvsp[0].interm.typeLine)); } -#line 9418 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9609 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 502: -#line 3328 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 511: +#line 3422 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.typeLine).type = new TType(EbtVoid); (yyval.interm.typeLine).loc = (yyvsp[0].lex).loc; (yyval.interm.typeLine).type->setFieldName(*(yyvsp[0].lex).string); } -#line 9428 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9619 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 503: -#line 3333 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 512: +#line 3427 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.arrayOfArrayVersionCheck((yyvsp[-1].lex).loc, (yyvsp[0].interm).arraySizes); @@ -9471,235 +9628,235 @@ yyreduce: (yyval.interm.typeLine).type->setFieldName(*(yyvsp[-1].lex).string); (yyval.interm.typeLine).type->transferArraySizes((yyvsp[0].interm).arraySizes); } -#line 9441 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9632 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 504: -#line 3344 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 513: +#line 3438 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 9449 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9640 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 505: -#line 3348 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 514: +#line 3442 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { const char* initFeature = "{ } style initializers"; parseContext.requireProfile((yyvsp[-2].lex).loc, ~EEsProfile, initFeature); parseContext.profileRequires((yyvsp[-2].lex).loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, initFeature); (yyval.interm.intermTypedNode) = (yyvsp[-1].interm.intermTypedNode); } -#line 9460 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9651 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 506: -#line 3354 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 515: +#line 3448 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { const char* initFeature = "{ } style initializers"; parseContext.requireProfile((yyvsp[-3].lex).loc, ~EEsProfile, initFeature); parseContext.profileRequires((yyvsp[-3].lex).loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, initFeature); (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); } -#line 9471 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 507: -#line 3365 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.intermediate.growAggregate(0, (yyvsp[0].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)->getLoc()); - } -#line 9479 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 508: 
-#line 3368 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - } -#line 9487 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 509: -#line 3375 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9493 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 510: -#line 3379 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9499 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 511: -#line 3380 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9505 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 512: -#line 3386 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9511 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 513: -#line 3387 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9517 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 514: -#line 3388 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9523 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 515: -#line 3389 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9529 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9662 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 516: -#line 3390 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9535 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 3459 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.intermTypedNode) = parseContext.intermediate.growAggregate(0, (yyvsp[0].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)->getLoc()); + } +#line 9670 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 517: -#line 3391 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9541 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 3462 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { + (yyval.interm.intermTypedNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); + } +#line 9678 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 518: -#line 3392 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 3469 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9547 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9684 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 519: -#line 3394 "MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 3473 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9553 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9690 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; case 520: -#line 3400 
"MachineIndependent/glslang.y" /* yacc.c:1646 */ +#line 3474 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } +#line 9696 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 521: +#line 3480 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } +#line 9702 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 522: +#line 3481 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } +#line 9708 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 523: +#line 3482 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } +#line 9714 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 524: +#line 3483 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } +#line 9720 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 525: +#line 3484 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } +#line 9726 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 526: +#line 3485 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } +#line 9732 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 527: +#line 3486 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } +#line 9738 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 528: +#line 3488 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } +#line 9744 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ + break; + + case 529: +#line 3494 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireStage((yyvsp[-1].lex).loc, EShLangFragment, "demote"); parseContext.requireExtensions((yyvsp[-1].lex).loc, 1, &E_GL_EXT_demote_to_helper_invocation, "demote"); (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpDemote, (yyvsp[-1].lex).loc); } -#line 9563 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9754 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 521: -#line 3409 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 530: +#line 3503 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = 0; } -#line 9569 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9760 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 522: -#line 3410 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 531: +#line 3504 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.symbolTable.push(); ++parseContext.statementNestingLevel; } -#line 9578 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9769 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 523: -#line 3414 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 532: +#line 3508 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); --parseContext.statementNestingLevel; } -#line 9587 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9778 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 
524: -#line 3418 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 533: +#line 3512 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if ((yyvsp[-2].interm.intermNode) && (yyvsp[-2].interm.intermNode)->getAsAggregate()) (yyvsp[-2].interm.intermNode)->getAsAggregate()->setOperator(EOpSequence); (yyval.interm.intermNode) = (yyvsp[-2].interm.intermNode); } -#line 9597 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9788 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 525: -#line 3426 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 534: +#line 3520 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9603 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9794 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 526: -#line 3427 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 535: +#line 3521 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9609 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9800 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 527: -#line 3431 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 536: +#line 3525 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { ++parseContext.controlFlowNestingLevel; } -#line 9617 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9808 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 528: -#line 3434 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 537: +#line 3528 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { --parseContext.controlFlowNestingLevel; (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9626 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9817 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 529: -#line 3438 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 538: +#line 3532 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.symbolTable.push(); ++parseContext.statementNestingLevel; ++parseContext.controlFlowNestingLevel; } -#line 9636 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9827 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 530: -#line 3443 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 539: +#line 3537 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); --parseContext.statementNestingLevel; --parseContext.controlFlowNestingLevel; (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9647 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9838 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 531: -#line 3452 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 540: +#line 3546 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = 0; } -#line 9655 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9846 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 532: -#line 3455 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 541: +#line 3549 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if ((yyvsp[-1].interm.intermNode) && (yyvsp[-1].interm.intermNode)->getAsAggregate()) (yyvsp[-1].interm.intermNode)->getAsAggregate()->setOperator(EOpSequence); (yyval.interm.intermNode) = (yyvsp[-1].interm.intermNode); } -#line 9665 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9856 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 533: -#line 3463 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 542: +#line 3557 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = parseContext.intermediate.makeAggregate((yyvsp[0].interm.intermNode)); if ((yyvsp[0].interm.intermNode) && (yyvsp[0].interm.intermNode)->getAsBranchNode() && ((yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpCase || @@ -9708,11 +9865,11 @@ yyreduce: (yyval.interm.intermNode) = 0; // start a fresh subsequence for what's after this case } } -#line 9678 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9869 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 534: -#line 3471 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 543: +#line 3565 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if ((yyvsp[0].interm.intermNode) && (yyvsp[0].interm.intermNode)->getAsBranchNode() && ((yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpCase || (yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpDefault)) { @@ -9721,76 +9878,76 @@ yyreduce: } else (yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-1].interm.intermNode), (yyvsp[0].interm.intermNode)); } -#line 9691 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9882 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 535: -#line 3482 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 544: +#line 3576 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = 0; } -#line 9697 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9888 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 536: -#line 3483 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 545: +#line 3577 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = static_cast((yyvsp[-1].interm.intermTypedNode)); } -#line 9703 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9894 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 537: -#line 3487 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 546: +#line 3581 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9711 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9902 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 538: -#line 3491 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 547: +#line 3585 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.handleSelectionAttributes(*(yyvsp[-1].interm.attributes), (yyvsp[0].interm.intermNode)); (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9720 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9911 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 539: -#line 3498 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 548: +#line 3592 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.boolCheck((yyvsp[-4].lex).loc, (yyvsp[-2].interm.intermTypedNode)); (yyval.interm.intermNode) = parseContext.intermediate.addSelection((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.nodePair), (yyvsp[-4].lex).loc); } -#line 9729 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9920 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 540: -#line 3505 
"MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 549: +#line 3599 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.nodePair).node1 = (yyvsp[-2].interm.intermNode); (yyval.interm.nodePair).node2 = (yyvsp[0].interm.intermNode); } -#line 9738 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9929 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 541: -#line 3509 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 550: +#line 3603 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.nodePair).node1 = (yyvsp[0].interm.intermNode); (yyval.interm.nodePair).node2 = 0; } -#line 9747 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9938 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 542: -#line 3517 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 551: +#line 3611 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); parseContext.boolCheck((yyvsp[0].interm.intermTypedNode)->getLoc(), (yyvsp[0].interm.intermTypedNode)); } -#line 9756 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9947 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 543: -#line 3521 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 552: +#line 3615 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.boolCheck((yyvsp[-2].lex).loc, (yyvsp[-3].interm.type)); @@ -9801,28 +9958,28 @@ yyreduce: else (yyval.interm.intermTypedNode) = 0; } -#line 9771 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9962 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 544: -#line 3534 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 553: +#line 3628 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9779 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9970 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 545: -#line 3538 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 554: +#line 3632 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.handleSwitchAttributes(*(yyvsp[-1].interm.attributes), (yyvsp[0].interm.intermNode)); (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9788 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9979 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 546: -#line 3545 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 555: +#line 3639 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // start new switch sequence on the switch stack ++parseContext.controlFlowNestingLevel; @@ -9831,11 +9988,11 @@ yyreduce: parseContext.switchLevel.push_back(parseContext.statementNestingLevel); parseContext.symbolTable.push(); } -#line 9801 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 9992 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 547: -#line 3553 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 556: +#line 3647 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = parseContext.addSwitch((yyvsp[-7].lex).loc, (yyvsp[-5].interm.intermTypedNode), (yyvsp[-1].interm.intermNode) ? 
(yyvsp[-1].interm.intermNode)->getAsAggregate() : 0); delete parseContext.switchSequenceStack.back(); @@ -9845,27 +10002,27 @@ yyreduce: --parseContext.statementNestingLevel; --parseContext.controlFlowNestingLevel; } -#line 9815 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10006 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 548: -#line 3565 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 557: +#line 3659 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = 0; } -#line 9823 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10014 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 549: -#line 3568 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 558: +#line 3662 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9831 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10022 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 550: -#line 3574 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 559: +#line 3668 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = 0; if (parseContext.switchLevel.size() == 0) @@ -9878,11 +10035,11 @@ yyreduce: (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpCase, (yyvsp[-1].interm.intermTypedNode), (yyvsp[-2].lex).loc); } } -#line 9848 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10039 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 551: -#line 3586 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 560: +#line 3680 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = 0; if (parseContext.switchLevel.size() == 0) @@ -9892,28 +10049,28 @@ yyreduce: else (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpDefault, (yyvsp[-1].lex).loc); } -#line 9862 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10053 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 552: -#line 3598 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 561: +#line 3692 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9870 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10061 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 553: -#line 3602 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 562: +#line 3696 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.handleLoopAttributes(*(yyvsp[-1].interm.attributes), (yyvsp[0].interm.intermNode)); (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9879 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10070 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 554: -#line 3609 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 563: +#line 3703 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if (! 
parseContext.limits.whileLoops) parseContext.error((yyvsp[-1].lex).loc, "while loops not available", "limitation", ""); @@ -9922,11 +10079,11 @@ yyreduce: ++parseContext.statementNestingLevel; ++parseContext.controlFlowNestingLevel; } -#line 9892 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10083 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 555: -#line 3617 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 564: +#line 3711 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); (yyval.interm.intermNode) = parseContext.intermediate.addLoop((yyvsp[0].interm.intermNode), (yyvsp[-2].interm.intermTypedNode), 0, true, (yyvsp[-5].lex).loc); @@ -9934,21 +10091,21 @@ yyreduce: --parseContext.statementNestingLevel; --parseContext.controlFlowNestingLevel; } -#line 9904 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10095 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 556: -#line 3624 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 565: +#line 3718 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { ++parseContext.loopNestingLevel; ++parseContext.statementNestingLevel; ++parseContext.controlFlowNestingLevel; } -#line 9914 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10105 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 557: -#line 3629 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 566: +#line 3723 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if (! parseContext.limits.whileLoops) parseContext.error((yyvsp[-7].lex).loc, "do-while loops not available", "limitation", ""); @@ -9960,22 +10117,22 @@ yyreduce: --parseContext.statementNestingLevel; --parseContext.controlFlowNestingLevel; } -#line 9930 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10121 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 558: -#line 3640 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 567: +#line 3734 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.symbolTable.push(); ++parseContext.loopNestingLevel; ++parseContext.statementNestingLevel; ++parseContext.controlFlowNestingLevel; } -#line 9941 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10132 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 559: -#line 3646 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 568: +#line 3740 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); (yyval.interm.intermNode) = parseContext.intermediate.makeAggregate((yyvsp[-3].interm.intermNode), (yyvsp[-5].lex).loc); @@ -9988,81 +10145,81 @@ yyreduce: --parseContext.statementNestingLevel; --parseContext.controlFlowNestingLevel; } -#line 9958 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10149 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 560: -#line 3661 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 569: +#line 3755 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9966 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10157 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 561: -#line 3664 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 570: +#line 3758 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 9974 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10165 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 562: -#line 3670 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 571: +#line 3764 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 9982 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10173 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 563: -#line 3673 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 572: +#line 3767 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermTypedNode) = 0; } -#line 9990 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10181 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 564: -#line 3679 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 573: +#line 3773 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.nodePair).node1 = (yyvsp[-1].interm.intermTypedNode); (yyval.interm.nodePair).node2 = 0; } -#line 9999 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10190 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 565: -#line 3683 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 574: +#line 3777 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.nodePair).node1 = (yyvsp[-2].interm.intermTypedNode); (yyval.interm.nodePair).node2 = (yyvsp[0].interm.intermTypedNode); } -#line 10008 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10199 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 566: -#line 3690 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 575: +#line 3784 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if (parseContext.loopNestingLevel <= 0) parseContext.error((yyvsp[-1].lex).loc, "continue statement only allowed in loops", "", ""); (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpContinue, (yyvsp[-1].lex).loc); } -#line 10018 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10209 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 567: -#line 3695 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 576: +#line 3789 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if (parseContext.loopNestingLevel + parseContext.switchSequenceStack.size() <= 0) parseContext.error((yyvsp[-1].lex).loc, "break statement only allowed in switch and loops", "", ""); (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpBreak, (yyvsp[-1].lex).loc); } -#line 10028 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10219 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 568: -#line 3700 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 577: +#line 3794 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpReturn, (yyvsp[-1].lex).loc); if (parseContext.currentFunctionType->getBasicType() != EbtVoid) @@ -10070,83 +10227,91 @@ yyreduce: if (parseContext.inMain) parseContext.postEntryPointReturn = true; } -#line 10040 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10231 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 569: -#line 3707 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 578: +#line 3801 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = parseContext.handleReturnValue((yyvsp[-2].lex).loc, 
(yyvsp[-1].interm.intermTypedNode)); } -#line 10048 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10239 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 570: -#line 3710 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 579: +#line 3804 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireStage((yyvsp[-1].lex).loc, EShLangFragment, "discard"); (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpKill, (yyvsp[-1].lex).loc); } -#line 10057 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10248 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 571: -#line 3719 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 580: +#line 3813 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); parseContext.intermediate.setTreeRoot((yyval.interm.intermNode)); } -#line 10066 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10257 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 572: -#line 3723 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 581: +#line 3817 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { if ((yyvsp[0].interm.intermNode) != nullptr) { (yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-1].interm.intermNode), (yyvsp[0].interm.intermNode)); parseContext.intermediate.setTreeRoot((yyval.interm.intermNode)); } } -#line 10077 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10268 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 573: -#line 3732 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 582: +#line 3826 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 10085 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10276 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 574: -#line 3735 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 583: +#line 3829 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 10093 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10284 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 575: -#line 3739 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 584: +#line 3833 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { parseContext.requireProfile((yyvsp[0].lex).loc, ~EEsProfile, "extraneous semicolon"); parseContext.profileRequires((yyvsp[0].lex).loc, ~EEsProfile, 460, nullptr, "extraneous semicolon"); (yyval.interm.intermNode) = nullptr; } -#line 10103 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10294 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 576: -#line 3748 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 585: +#line 3842 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyvsp[0].interm).function = parseContext.handleFunctionDeclarator((yyvsp[0].interm).loc, *(yyvsp[0].interm).function, false /* not prototype */); (yyvsp[0].interm).intermNode = parseContext.handleFunctionDefinition((yyvsp[0].interm).loc, *(yyvsp[0].interm).function); + + // For ES 100 only, according to ES shading language 100 spec: A function + // body has a scope nested inside the function's definition. 
+ if (parseContext.profile == EEsProfile && parseContext.version == 100) + { + parseContext.symbolTable.push(); + ++parseContext.statementNestingLevel; + } } -#line 10112 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10311 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 577: -#line 3752 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 586: +#line 3854 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { // May be best done as post process phase on intermediate code if (parseContext.currentFunctionType->getBasicType() != EbtVoid && ! parseContext.functionReturnsValue) @@ -10161,53 +10326,64 @@ yyreduce: (yyval.interm.intermNode)->getAsAggregate()->setOptimize(parseContext.contextPragma.optimize); (yyval.interm.intermNode)->getAsAggregate()->setDebug(parseContext.contextPragma.debug); (yyval.interm.intermNode)->getAsAggregate()->setPragmaTable(parseContext.contextPragma.pragmaTable); + + // Set currentFunctionType to empty pointer when goes outside of the function + parseContext.currentFunctionType = nullptr; + + // For ES 100 only, according to ES shading language 100 spec: A function + // body has a scope nested inside the function's definition. + if (parseContext.profile == EEsProfile && parseContext.version == 100) + { + parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); + --parseContext.statementNestingLevel; + } } -#line 10132 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10342 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 578: -#line 3771 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 587: +#line 3884 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.attributes) = (yyvsp[-2].interm.attributes); parseContext.requireExtensions((yyvsp[-4].lex).loc, 1, &E_GL_EXT_control_flow_attributes, "attribute"); } -#line 10141 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10351 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 579: -#line 3777 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 588: +#line 3890 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.attributes) = (yyvsp[0].interm.attributes); } -#line 10149 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10359 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 580: -#line 3780 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 589: +#line 3893 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.attributes) = parseContext.mergeAttributes((yyvsp[-2].interm.attributes), (yyvsp[0].interm.attributes)); } -#line 10157 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10367 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 581: -#line 3785 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 590: +#line 3898 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.attributes) = parseContext.makeAttributes(*(yyvsp[0].lex).string); } -#line 10165 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10375 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; - case 582: -#line 3788 "MachineIndependent/glslang.y" /* yacc.c:1646 */ + case 591: +#line 3901 "MachineIndependent/glslang.y" /* yacc.c:1646 */ { (yyval.interm.attributes) = parseContext.makeAttributes(*(yyvsp[-3].lex).string, (yyvsp[-1].interm.intermTypedNode)); } -#line 10173 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10383 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ break; -#line 
10177 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ +#line 10387 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ default: break; } /* User semantic actions sometimes alter yychar, and that requires @@ -10435,5 +10611,5 @@ yyreturn: #endif return yyresult; } -#line 3793 "MachineIndependent/glslang.y" /* yacc.c:1906 */ +#line 3906 "MachineIndependent/glslang.y" /* yacc.c:1906 */ diff --git a/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h b/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h index f4f41147305..78b72a246bd 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h +++ b/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h @@ -203,256 +203,265 @@ extern int yydebug; F64MAT4X4 = 413, ATOMIC_UINT = 414, ACCSTRUCTNV = 415, - FCOOPMATNV = 416, - ICOOPMATNV = 417, - UCOOPMATNV = 418, - SAMPLERCUBEARRAY = 419, - SAMPLERCUBEARRAYSHADOW = 420, - ISAMPLERCUBEARRAY = 421, - USAMPLERCUBEARRAY = 422, - SAMPLER1D = 423, - SAMPLER1DARRAY = 424, - SAMPLER1DARRAYSHADOW = 425, - ISAMPLER1D = 426, - SAMPLER1DSHADOW = 427, - SAMPLER2DRECT = 428, - SAMPLER2DRECTSHADOW = 429, - ISAMPLER2DRECT = 430, - USAMPLER2DRECT = 431, - SAMPLERBUFFER = 432, - ISAMPLERBUFFER = 433, - USAMPLERBUFFER = 434, - SAMPLER2DMS = 435, - ISAMPLER2DMS = 436, - USAMPLER2DMS = 437, - SAMPLER2DMSARRAY = 438, - ISAMPLER2DMSARRAY = 439, - USAMPLER2DMSARRAY = 440, - SAMPLEREXTERNALOES = 441, - SAMPLEREXTERNAL2DY2YEXT = 442, - ISAMPLER1DARRAY = 443, - USAMPLER1D = 444, - USAMPLER1DARRAY = 445, - F16SAMPLER1D = 446, - F16SAMPLER2D = 447, - F16SAMPLER3D = 448, - F16SAMPLER2DRECT = 449, - F16SAMPLERCUBE = 450, - F16SAMPLER1DARRAY = 451, - F16SAMPLER2DARRAY = 452, - F16SAMPLERCUBEARRAY = 453, - F16SAMPLERBUFFER = 454, - F16SAMPLER2DMS = 455, - F16SAMPLER2DMSARRAY = 456, - F16SAMPLER1DSHADOW = 457, - F16SAMPLER2DSHADOW = 458, - F16SAMPLER1DARRAYSHADOW = 459, - F16SAMPLER2DARRAYSHADOW = 460, - F16SAMPLER2DRECTSHADOW = 461, - F16SAMPLERCUBESHADOW = 462, - F16SAMPLERCUBEARRAYSHADOW = 463, - IMAGE1D = 464, - IIMAGE1D = 465, - UIMAGE1D = 466, - IMAGE2D = 467, - IIMAGE2D = 468, - UIMAGE2D = 469, - IMAGE3D = 470, - IIMAGE3D = 471, - UIMAGE3D = 472, - IMAGE2DRECT = 473, - IIMAGE2DRECT = 474, - UIMAGE2DRECT = 475, - IMAGECUBE = 476, - IIMAGECUBE = 477, - UIMAGECUBE = 478, - IMAGEBUFFER = 479, - IIMAGEBUFFER = 480, - UIMAGEBUFFER = 481, - IMAGE1DARRAY = 482, - IIMAGE1DARRAY = 483, - UIMAGE1DARRAY = 484, - IMAGE2DARRAY = 485, - IIMAGE2DARRAY = 486, - UIMAGE2DARRAY = 487, - IMAGECUBEARRAY = 488, - IIMAGECUBEARRAY = 489, - UIMAGECUBEARRAY = 490, - IMAGE2DMS = 491, - IIMAGE2DMS = 492, - UIMAGE2DMS = 493, - IMAGE2DMSARRAY = 494, - IIMAGE2DMSARRAY = 495, - UIMAGE2DMSARRAY = 496, - F16IMAGE1D = 497, - F16IMAGE2D = 498, - F16IMAGE3D = 499, - F16IMAGE2DRECT = 500, - F16IMAGECUBE = 501, - F16IMAGE1DARRAY = 502, - F16IMAGE2DARRAY = 503, - F16IMAGECUBEARRAY = 504, - F16IMAGEBUFFER = 505, - F16IMAGE2DMS = 506, - F16IMAGE2DMSARRAY = 507, - TEXTURECUBEARRAY = 508, - ITEXTURECUBEARRAY = 509, - UTEXTURECUBEARRAY = 510, - TEXTURE1D = 511, - ITEXTURE1D = 512, - UTEXTURE1D = 513, - TEXTURE1DARRAY = 514, - ITEXTURE1DARRAY = 515, - UTEXTURE1DARRAY = 516, - TEXTURE2DRECT = 517, - ITEXTURE2DRECT = 518, - UTEXTURE2DRECT = 519, - TEXTUREBUFFER = 520, - ITEXTUREBUFFER = 521, - UTEXTUREBUFFER = 522, - TEXTURE2DMS = 523, - ITEXTURE2DMS = 524, - UTEXTURE2DMS = 525, - TEXTURE2DMSARRAY = 526, - ITEXTURE2DMSARRAY = 527, - UTEXTURE2DMSARRAY = 528, - F16TEXTURE1D = 529, - F16TEXTURE2D = 530, - 
F16TEXTURE3D = 531, - F16TEXTURE2DRECT = 532, - F16TEXTURECUBE = 533, - F16TEXTURE1DARRAY = 534, - F16TEXTURE2DARRAY = 535, - F16TEXTURECUBEARRAY = 536, - F16TEXTUREBUFFER = 537, - F16TEXTURE2DMS = 538, - F16TEXTURE2DMSARRAY = 539, - SUBPASSINPUT = 540, - SUBPASSINPUTMS = 541, - ISUBPASSINPUT = 542, - ISUBPASSINPUTMS = 543, - USUBPASSINPUT = 544, - USUBPASSINPUTMS = 545, - F16SUBPASSINPUT = 546, - F16SUBPASSINPUTMS = 547, - LEFT_OP = 548, - RIGHT_OP = 549, - INC_OP = 550, - DEC_OP = 551, - LE_OP = 552, - GE_OP = 553, - EQ_OP = 554, - NE_OP = 555, - AND_OP = 556, - OR_OP = 557, - XOR_OP = 558, - MUL_ASSIGN = 559, - DIV_ASSIGN = 560, - ADD_ASSIGN = 561, - MOD_ASSIGN = 562, - LEFT_ASSIGN = 563, - RIGHT_ASSIGN = 564, - AND_ASSIGN = 565, - XOR_ASSIGN = 566, - OR_ASSIGN = 567, - SUB_ASSIGN = 568, - LEFT_PAREN = 569, - RIGHT_PAREN = 570, - LEFT_BRACKET = 571, - RIGHT_BRACKET = 572, - LEFT_BRACE = 573, - RIGHT_BRACE = 574, - DOT = 575, - COMMA = 576, - COLON = 577, - EQUAL = 578, - SEMICOLON = 579, - BANG = 580, - DASH = 581, - TILDE = 582, - PLUS = 583, - STAR = 584, - SLASH = 585, - PERCENT = 586, - LEFT_ANGLE = 587, - RIGHT_ANGLE = 588, - VERTICAL_BAR = 589, - CARET = 590, - AMPERSAND = 591, - QUESTION = 592, - INVARIANT = 593, - HIGH_PRECISION = 594, - MEDIUM_PRECISION = 595, - LOW_PRECISION = 596, - PRECISION = 597, - PACKED = 598, - RESOURCE = 599, - SUPERP = 600, - FLOATCONSTANT = 601, - INTCONSTANT = 602, - UINTCONSTANT = 603, - BOOLCONSTANT = 604, - IDENTIFIER = 605, - TYPE_NAME = 606, - CENTROID = 607, - IN = 608, - OUT = 609, - INOUT = 610, - STRUCT = 611, - VOID = 612, - WHILE = 613, - BREAK = 614, - CONTINUE = 615, - DO = 616, - ELSE = 617, - FOR = 618, - IF = 619, - DISCARD = 620, - RETURN = 621, - SWITCH = 622, - CASE = 623, - DEFAULT = 624, - UNIFORM = 625, - SHARED = 626, - BUFFER = 627, - FLAT = 628, - SMOOTH = 629, - LAYOUT = 630, - DOUBLECONSTANT = 631, - INT16CONSTANT = 632, - UINT16CONSTANT = 633, - FLOAT16CONSTANT = 634, - INT32CONSTANT = 635, - UINT32CONSTANT = 636, - INT64CONSTANT = 637, - UINT64CONSTANT = 638, - SUBROUTINE = 639, - DEMOTE = 640, - PAYLOADNV = 641, - PAYLOADINNV = 642, - HITATTRNV = 643, - CALLDATANV = 644, - CALLDATAINNV = 645, - PATCH = 646, - SAMPLE = 647, - NONUNIFORM = 648, - COHERENT = 649, - VOLATILE = 650, - RESTRICT = 651, - READONLY = 652, - WRITEONLY = 653, - DEVICECOHERENT = 654, - QUEUEFAMILYCOHERENT = 655, - WORKGROUPCOHERENT = 656, - SUBGROUPCOHERENT = 657, - NONPRIVATE = 658, - NOPERSPECTIVE = 659, - EXPLICITINTERPAMD = 660, - PERVERTEXNV = 661, - PERPRIMITIVENV = 662, - PERVIEWNV = 663, - PERTASKNV = 664, - PRECISE = 665 + ACCSTRUCTEXT = 416, + RAYQUERYEXT = 417, + FCOOPMATNV = 418, + ICOOPMATNV = 419, + UCOOPMATNV = 420, + SAMPLERCUBEARRAY = 421, + SAMPLERCUBEARRAYSHADOW = 422, + ISAMPLERCUBEARRAY = 423, + USAMPLERCUBEARRAY = 424, + SAMPLER1D = 425, + SAMPLER1DARRAY = 426, + SAMPLER1DARRAYSHADOW = 427, + ISAMPLER1D = 428, + SAMPLER1DSHADOW = 429, + SAMPLER2DRECT = 430, + SAMPLER2DRECTSHADOW = 431, + ISAMPLER2DRECT = 432, + USAMPLER2DRECT = 433, + SAMPLERBUFFER = 434, + ISAMPLERBUFFER = 435, + USAMPLERBUFFER = 436, + SAMPLER2DMS = 437, + ISAMPLER2DMS = 438, + USAMPLER2DMS = 439, + SAMPLER2DMSARRAY = 440, + ISAMPLER2DMSARRAY = 441, + USAMPLER2DMSARRAY = 442, + SAMPLEREXTERNALOES = 443, + SAMPLEREXTERNAL2DY2YEXT = 444, + ISAMPLER1DARRAY = 445, + USAMPLER1D = 446, + USAMPLER1DARRAY = 447, + F16SAMPLER1D = 448, + F16SAMPLER2D = 449, + F16SAMPLER3D = 450, + F16SAMPLER2DRECT = 451, + F16SAMPLERCUBE = 452, + F16SAMPLER1DARRAY = 453, + 
F16SAMPLER2DARRAY = 454, + F16SAMPLERCUBEARRAY = 455, + F16SAMPLERBUFFER = 456, + F16SAMPLER2DMS = 457, + F16SAMPLER2DMSARRAY = 458, + F16SAMPLER1DSHADOW = 459, + F16SAMPLER2DSHADOW = 460, + F16SAMPLER1DARRAYSHADOW = 461, + F16SAMPLER2DARRAYSHADOW = 462, + F16SAMPLER2DRECTSHADOW = 463, + F16SAMPLERCUBESHADOW = 464, + F16SAMPLERCUBEARRAYSHADOW = 465, + IMAGE1D = 466, + IIMAGE1D = 467, + UIMAGE1D = 468, + IMAGE2D = 469, + IIMAGE2D = 470, + UIMAGE2D = 471, + IMAGE3D = 472, + IIMAGE3D = 473, + UIMAGE3D = 474, + IMAGE2DRECT = 475, + IIMAGE2DRECT = 476, + UIMAGE2DRECT = 477, + IMAGECUBE = 478, + IIMAGECUBE = 479, + UIMAGECUBE = 480, + IMAGEBUFFER = 481, + IIMAGEBUFFER = 482, + UIMAGEBUFFER = 483, + IMAGE1DARRAY = 484, + IIMAGE1DARRAY = 485, + UIMAGE1DARRAY = 486, + IMAGE2DARRAY = 487, + IIMAGE2DARRAY = 488, + UIMAGE2DARRAY = 489, + IMAGECUBEARRAY = 490, + IIMAGECUBEARRAY = 491, + UIMAGECUBEARRAY = 492, + IMAGE2DMS = 493, + IIMAGE2DMS = 494, + UIMAGE2DMS = 495, + IMAGE2DMSARRAY = 496, + IIMAGE2DMSARRAY = 497, + UIMAGE2DMSARRAY = 498, + F16IMAGE1D = 499, + F16IMAGE2D = 500, + F16IMAGE3D = 501, + F16IMAGE2DRECT = 502, + F16IMAGECUBE = 503, + F16IMAGE1DARRAY = 504, + F16IMAGE2DARRAY = 505, + F16IMAGECUBEARRAY = 506, + F16IMAGEBUFFER = 507, + F16IMAGE2DMS = 508, + F16IMAGE2DMSARRAY = 509, + TEXTURECUBEARRAY = 510, + ITEXTURECUBEARRAY = 511, + UTEXTURECUBEARRAY = 512, + TEXTURE1D = 513, + ITEXTURE1D = 514, + UTEXTURE1D = 515, + TEXTURE1DARRAY = 516, + ITEXTURE1DARRAY = 517, + UTEXTURE1DARRAY = 518, + TEXTURE2DRECT = 519, + ITEXTURE2DRECT = 520, + UTEXTURE2DRECT = 521, + TEXTUREBUFFER = 522, + ITEXTUREBUFFER = 523, + UTEXTUREBUFFER = 524, + TEXTURE2DMS = 525, + ITEXTURE2DMS = 526, + UTEXTURE2DMS = 527, + TEXTURE2DMSARRAY = 528, + ITEXTURE2DMSARRAY = 529, + UTEXTURE2DMSARRAY = 530, + F16TEXTURE1D = 531, + F16TEXTURE2D = 532, + F16TEXTURE3D = 533, + F16TEXTURE2DRECT = 534, + F16TEXTURECUBE = 535, + F16TEXTURE1DARRAY = 536, + F16TEXTURE2DARRAY = 537, + F16TEXTURECUBEARRAY = 538, + F16TEXTUREBUFFER = 539, + F16TEXTURE2DMS = 540, + F16TEXTURE2DMSARRAY = 541, + SUBPASSINPUT = 542, + SUBPASSINPUTMS = 543, + ISUBPASSINPUT = 544, + ISUBPASSINPUTMS = 545, + USUBPASSINPUT = 546, + USUBPASSINPUTMS = 547, + F16SUBPASSINPUT = 548, + F16SUBPASSINPUTMS = 549, + LEFT_OP = 550, + RIGHT_OP = 551, + INC_OP = 552, + DEC_OP = 553, + LE_OP = 554, + GE_OP = 555, + EQ_OP = 556, + NE_OP = 557, + AND_OP = 558, + OR_OP = 559, + XOR_OP = 560, + MUL_ASSIGN = 561, + DIV_ASSIGN = 562, + ADD_ASSIGN = 563, + MOD_ASSIGN = 564, + LEFT_ASSIGN = 565, + RIGHT_ASSIGN = 566, + AND_ASSIGN = 567, + XOR_ASSIGN = 568, + OR_ASSIGN = 569, + SUB_ASSIGN = 570, + STRING_LITERAL = 571, + LEFT_PAREN = 572, + RIGHT_PAREN = 573, + LEFT_BRACKET = 574, + RIGHT_BRACKET = 575, + LEFT_BRACE = 576, + RIGHT_BRACE = 577, + DOT = 578, + COMMA = 579, + COLON = 580, + EQUAL = 581, + SEMICOLON = 582, + BANG = 583, + DASH = 584, + TILDE = 585, + PLUS = 586, + STAR = 587, + SLASH = 588, + PERCENT = 589, + LEFT_ANGLE = 590, + RIGHT_ANGLE = 591, + VERTICAL_BAR = 592, + CARET = 593, + AMPERSAND = 594, + QUESTION = 595, + INVARIANT = 596, + HIGH_PRECISION = 597, + MEDIUM_PRECISION = 598, + LOW_PRECISION = 599, + PRECISION = 600, + PACKED = 601, + RESOURCE = 602, + SUPERP = 603, + FLOATCONSTANT = 604, + INTCONSTANT = 605, + UINTCONSTANT = 606, + BOOLCONSTANT = 607, + IDENTIFIER = 608, + TYPE_NAME = 609, + CENTROID = 610, + IN = 611, + OUT = 612, + INOUT = 613, + STRUCT = 614, + VOID = 615, + WHILE = 616, + BREAK = 617, + CONTINUE = 618, + DO = 619, + ELSE = 620, + FOR = 
621, + IF = 622, + DISCARD = 623, + RETURN = 624, + SWITCH = 625, + CASE = 626, + DEFAULT = 627, + UNIFORM = 628, + SHARED = 629, + BUFFER = 630, + FLAT = 631, + SMOOTH = 632, + LAYOUT = 633, + DOUBLECONSTANT = 634, + INT16CONSTANT = 635, + UINT16CONSTANT = 636, + FLOAT16CONSTANT = 637, + INT32CONSTANT = 638, + UINT32CONSTANT = 639, + INT64CONSTANT = 640, + UINT64CONSTANT = 641, + SUBROUTINE = 642, + DEMOTE = 643, + PAYLOADNV = 644, + PAYLOADINNV = 645, + HITATTRNV = 646, + CALLDATANV = 647, + CALLDATAINNV = 648, + PAYLOADEXT = 649, + PAYLOADINEXT = 650, + HITATTREXT = 651, + CALLDATAEXT = 652, + CALLDATAINEXT = 653, + PATCH = 654, + SAMPLE = 655, + NONUNIFORM = 656, + COHERENT = 657, + VOLATILE = 658, + RESTRICT = 659, + READONLY = 660, + WRITEONLY = 661, + DEVICECOHERENT = 662, + QUEUEFAMILYCOHERENT = 663, + WORKGROUPCOHERENT = 664, + SUBGROUPCOHERENT = 665, + NONPRIVATE = 666, + SHADERCALLCOHERENT = 667, + NOPERSPECTIVE = 668, + EXPLICITINTERPAMD = 669, + PERVERTEXNV = 670, + PERPRIMITIVENV = 671, + PERVIEWNV = 672, + PERTASKNV = 673, + PRECISE = 674 }; #endif @@ -461,7 +470,7 @@ extern int yydebug; union YYSTYPE { -#line 96 "MachineIndependent/glslang.y" /* yacc.c:1909 */ +#line 97 "MachineIndependent/glslang.y" /* yacc.c:1909 */ struct { glslang::TSourceLoc loc; @@ -497,7 +506,7 @@ union YYSTYPE glslang::TArraySizes* typeParameters; } interm; -#line 501 "MachineIndependent/glslang_tab.cpp.h" /* yacc.c:1909 */ +#line 510 "MachineIndependent/glslang_tab.cpp.h" /* yacc.c:1909 */ }; typedef union YYSTYPE YYSTYPE; diff --git a/thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp b/thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp index 3a93aedafb6..f23a7058ab7 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp @@ -2,6 +2,7 @@ // Copyright (C) 2002-2005 3Dlabs Inc. Ltd. // Copyright (C) 2012-2016 LunarG, Inc. // Copyright (C) 2017 ARM Limited. +// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved. // // All rights reserved. // @@ -35,7 +36,7 @@ // POSSIBILITY OF SUCH DAMAGE. 
// -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) #include "localintermediate.h" #include "../Include/InfoSink.h" @@ -1078,18 +1079,43 @@ bool TOutputTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node case EOpSubpassLoad: out.debug << "subpassLoad"; break; case EOpSubpassLoadMS: out.debug << "subpassLoadMS"; break; - case EOpTraceNV: out.debug << "traceNV"; break; - case EOpReportIntersectionNV: out.debug << "reportIntersectionNV"; break; - case EOpIgnoreIntersectionNV: out.debug << "ignoreIntersectionNV"; break; - case EOpTerminateRayNV: out.debug << "terminateRayNV"; break; - case EOpExecuteCallableNV: out.debug << "executeCallableNV"; break; + case EOpTrace: out.debug << "traceNV"; break; + case EOpReportIntersection: out.debug << "reportIntersectionNV"; break; + case EOpIgnoreIntersection: out.debug << "ignoreIntersectionNV"; break; + case EOpTerminateRay: out.debug << "terminateRayNV"; break; + case EOpExecuteCallable: out.debug << "executeCallableNV"; break; case EOpWritePackedPrimitiveIndices4x8NV: out.debug << "writePackedPrimitiveIndices4x8NV"; break; + case EOpRayQueryInitialize: out.debug << "rayQueryInitializeEXT"; break; + case EOpRayQueryTerminate: out.debug << "rayQueryTerminateEXT"; break; + case EOpRayQueryGenerateIntersection: out.debug << "rayQueryGenerateIntersectionEXT"; break; + case EOpRayQueryConfirmIntersection: out.debug << "rayQueryConfirmIntersectionEXT"; break; + case EOpRayQueryProceed: out.debug << "rayQueryProceedEXT"; break; + case EOpRayQueryGetIntersectionType: out.debug << "rayQueryGetIntersectionTypeEXT"; break; + case EOpRayQueryGetRayTMin: out.debug << "rayQueryGetRayTMinEXT"; break; + case EOpRayQueryGetRayFlags: out.debug << "rayQueryGetRayFlagsEXT"; break; + case EOpRayQueryGetIntersectionT: out.debug << "rayQueryGetIntersectionTEXT"; break; + case EOpRayQueryGetIntersectionInstanceCustomIndex: out.debug << "rayQueryGetIntersectionInstanceCustomIndexEXT"; break; + case EOpRayQueryGetIntersectionInstanceId: out.debug << "rayQueryGetIntersectionInstanceIdEXT"; break; + case EOpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffset: out.debug << "rayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetEXT"; break; + case EOpRayQueryGetIntersectionGeometryIndex: out.debug << "rayQueryGetIntersectionGeometryIndexEXT"; break; + case EOpRayQueryGetIntersectionPrimitiveIndex: out.debug << "rayQueryGetIntersectionPrimitiveIndexEXT"; break; + case EOpRayQueryGetIntersectionBarycentrics: out.debug << "rayQueryGetIntersectionBarycentricsEXT"; break; + case EOpRayQueryGetIntersectionFrontFace: out.debug << "rayQueryGetIntersectionFrontFaceEXT"; break; + case EOpRayQueryGetIntersectionCandidateAABBOpaque: out.debug << "rayQueryGetIntersectionCandidateAABBOpaqueEXT"; break; + case EOpRayQueryGetIntersectionObjectRayDirection: out.debug << "rayQueryGetIntersectionObjectRayDirectionEXT"; break; + case EOpRayQueryGetIntersectionObjectRayOrigin: out.debug << "rayQueryGetIntersectionObjectRayOriginEXT"; break; + case EOpRayQueryGetWorldRayDirection: out.debug << "rayQueryGetWorldRayDirectionEXT"; break; + case EOpRayQueryGetWorldRayOrigin: out.debug << "rayQueryGetWorldRayOriginEXT"; break; + case EOpRayQueryGetIntersectionObjectToWorld: out.debug << "rayQueryGetIntersectionObjectToWorldEXT"; break; + case EOpRayQueryGetIntersectionWorldToObject: out.debug << "rayQueryGetIntersectionWorldToObjectEXT"; break; + case EOpCooperativeMatrixLoad: out.debug << "Load cooperative matrix"; break; case 
EOpCooperativeMatrixStore: out.debug << "Store cooperative matrix"; break; case EOpCooperativeMatrixMulAdd: out.debug << "MulAdd cooperative matrices"; break; case EOpIsHelperInvocation: out.debug << "IsHelperInvocation"; break; + case EOpDebugPrintf: out.debug << "Debug printf"; break; default: out.debug.message(EPrefixError, "Bad aggregation op"); } @@ -1536,4 +1562,4 @@ void TIntermediate::output(TInfoSink& infoSink, bool tree) } // end namespace glslang -#endif // not GLSLANG_WEB \ No newline at end of file +#endif // !GLSLANG_WEB && !GLSLANG_ANGLE diff --git a/thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp b/thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp index 3262c0a2030..905cf65d6b3 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp @@ -33,7 +33,7 @@ // POSSIBILITY OF SUCH DAMAGE. // -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) #include "../Include/Common.h" #include "../Include/InfoSink.h" @@ -79,6 +79,11 @@ public: target = &outputList; else if (base->getQualifier().isUniformOrBuffer() && !base->getQualifier().isPushConstant()) target = &uniformList; + // If a global is being visited, then we should also traverse it incase it's evaluation + // ends up visiting inputs we want to tag as live + else if (base->getQualifier().storage == EvqGlobal) + addGlobalReference(base->getName()); + if (target) { TVarEntryInfo ent = {base->getId(), base, ! traverseAll}; ent.stage = intermediate.getStage(); @@ -161,7 +166,7 @@ struct TNotifyUniformAdaptor } private: - TNotifyUniformAdaptor& operator=(TNotifyUniformAdaptor&); + TNotifyUniformAdaptor& operator=(TNotifyUniformAdaptor&) = delete; }; struct TNotifyInOutAdaptor @@ -180,7 +185,7 @@ struct TNotifyInOutAdaptor } private: - TNotifyInOutAdaptor& operator=(TNotifyInOutAdaptor&); + TNotifyInOutAdaptor& operator=(TNotifyInOutAdaptor&) = delete; }; struct TResolverUniformAdaptor { @@ -236,7 +241,7 @@ struct TResolverUniformAdaptor { bool& error; private: - TResolverUniformAdaptor& operator=(TResolverUniformAdaptor&); + TResolverUniformAdaptor& operator=(TResolverUniformAdaptor&) = delete; }; struct TResolverInOutAdaptor { @@ -283,7 +288,7 @@ struct TResolverInOutAdaptor { bool& error; private: - TResolverInOutAdaptor& operator=(TResolverInOutAdaptor&); + TResolverInOutAdaptor& operator=(TResolverInOutAdaptor&) = delete; }; // The class is used for reserving explicit uniform locations and ubo/ssbo/opaque bindings @@ -309,26 +314,43 @@ struct TSymbolValidater TIntermSymbol* base = ent1.symbol; const TType& type = ent1.symbol->getType(); const TString& name = entKey.first; - TString mangleName1, mangleName2; - type.appendMangledName(mangleName1); EShLanguage stage = ent1.stage; + TString mangleName1, mangleName2; if (currentStage != stage) { preStage = currentStage; currentStage = stage; nextStage = EShLangCount; for (int i = currentStage + 1; i < EShLangCount; i++) { - if (inVarMaps[i] != nullptr) + if (inVarMaps[i] != nullptr) { nextStage = static_cast(i); + break; + } } } + + if (type.getQualifier().isArrayedIo(stage)) { + TType subType(type, 0); + subType.appendMangledName(mangleName1); + } else { + type.appendMangledName(mangleName1); + } + if (base->getQualifier().storage == EvqVaryingIn) { // validate stage in; if (preStage == EShLangCount) return; + if (name == "gl_PerVertex") + return; if (outVarMaps[preStage] != nullptr) { auto ent2 = outVarMaps[preStage]->find(name); if (ent2 != 
outVarMaps[preStage]->end()) { - ent2->second.symbol->getType().appendMangledName(mangleName2); + if (ent2->second.symbol->getType().getQualifier().isArrayedIo(preStage)) { + TType subType(ent2->second.symbol->getType(), 0); + subType.appendMangledName(mangleName2); + } + else { + ent2->second.symbol->getType().appendMangledName(mangleName2); + } if (mangleName1 == mangleName2) return; else { @@ -343,10 +365,18 @@ struct TSymbolValidater // validate stage out; if (nextStage == EShLangCount) return; + if (name == "gl_PerVertex") + return; if (outVarMaps[nextStage] != nullptr) { auto ent2 = inVarMaps[nextStage]->find(name); if (ent2 != inVarMaps[nextStage]->end()) { - ent2->second.symbol->getType().appendMangledName(mangleName2); + if (ent2->second.symbol->getType().getQualifier().isArrayedIo(nextStage)) { + TType subType(ent2->second.symbol->getType(), 0); + subType.appendMangledName(mangleName2); + } + else { + ent2->second.symbol->getType().appendMangledName(mangleName2); + } if (mangleName1 == mangleName2) return; else { @@ -384,7 +414,7 @@ struct TSymbolValidater bool& hadError; private: - TSymbolValidater& operator=(TSymbolValidater&); + TSymbolValidater& operator=(TSymbolValidater&) = delete; }; struct TSlotCollector { @@ -398,7 +428,7 @@ struct TSlotCollector { TInfoSink& infoSink; private: - TSlotCollector& operator=(TSlotCollector&); + TSlotCollector& operator=(TSlotCollector&) = delete; }; TDefaultIoResolverBase::TDefaultIoResolverBase(const TIntermediate& intermediate) @@ -579,7 +609,7 @@ TDefaultGlslIoResolver::TDefaultGlslIoResolver(const TIntermediate& intermediate int TDefaultGlslIoResolver::resolveInOutLocation(EShLanguage stage, TVarEntryInfo& ent) { const TType& type = ent.symbol->getType(); - const TString& name = ent.symbol->getName(); + const TString& name = getAccessName(ent.symbol); if (currentStage != stage) { preStage = currentStage; currentStage = stage; @@ -627,7 +657,7 @@ int TDefaultGlslIoResolver::resolveInOutLocation(EShLanguage stage, TVarEntryInf TVarSlotMap::iterator iter = storageSlotMap[resourceKey].find(name); if (iter != storageSlotMap[resourceKey].end()) { // If interface resource be found, set it has location and this symbol's new location - // equal the symbol's explicit location declarated in pre or next stage. + // equal the symbol's explicit location declaration in pre or next stage. // // vs: out vec4 a; // fs: layout(..., location = 3,...) in vec4 a; @@ -663,7 +693,7 @@ int TDefaultGlslIoResolver::resolveInOutLocation(EShLanguage stage, TVarEntryInf int TDefaultGlslIoResolver::resolveUniformLocation(EShLanguage /*stage*/, TVarEntryInfo& ent) { const TType& type = ent.symbol->getType(); - const TString& name = ent.symbol->getName(); + const TString& name = getAccessName(ent.symbol); // kick out of not doing this if (! doAutoLocationMapping()) { return ent.newLocation = -1; @@ -706,7 +736,7 @@ int TDefaultGlslIoResolver::resolveUniformLocation(EShLanguage /*stage*/, TVarEn TVarSlotMap::iterator iter = slotMap.find(name); if (iter != slotMap.end()) { // If uniform resource be found, set it has location and this symbol's new location - // equal the uniform's explicit location declarated in other stage. + // equal the uniform's explicit location declaration in other stage. // // vs: uniform vec4 a; // fs: layout(..., location = 3,...) uniform vec4 a; @@ -714,7 +744,7 @@ int TDefaultGlslIoResolver::resolveUniformLocation(EShLanguage /*stage*/, TVarEn location = iter->second; } if (! hasLocation) { - // No explicit location declaraten in other stage. 
+ // No explicit location declaration in other stage. // So we should find a new slot for this uniform. // // vs: uniform vec4 a; @@ -723,7 +753,7 @@ int TDefaultGlslIoResolver::resolveUniformLocation(EShLanguage /*stage*/, TVarEn storageSlotMap[resourceKey][name] = location; } } else { - // the first uniform declarated in a program. + // the first uniform declaration in a program. TVarSlotMap varSlotMap; location = getFreeSlot(resourceKey, 0, size); varSlotMap[name] = location; @@ -734,8 +764,8 @@ int TDefaultGlslIoResolver::resolveUniformLocation(EShLanguage /*stage*/, TVarEn int TDefaultGlslIoResolver::resolveBinding(EShLanguage /*stage*/, TVarEntryInfo& ent) { const TType& type = ent.symbol->getType(); - const TString& name = ent.symbol->getName(); - // On OpenGL arrays of opaque types take a seperate binding for each element + const TString& name = getAccessName(ent.symbol); + // On OpenGL arrays of opaque types take a separate binding for each element int numBindings = intermediate.getSpv().openGl != 0 && type.isSizedArray() ? type.getCumulativeArraySize() : 1; TResourceType resource = getResourceType(type); // don't need to handle uniform symbol, it will be handled in resolveUniformLocation @@ -809,7 +839,7 @@ void TDefaultGlslIoResolver::endCollect(EShLanguage /*stage*/) { void TDefaultGlslIoResolver::reserverStorageSlot(TVarEntryInfo& ent, TInfoSink& infoSink) { const TType& type = ent.symbol->getType(); - const TString& name = ent.symbol->getName(); + const TString& name = getAccessName(ent.symbol); TStorageQualifier storage = type.getQualifier().storage; EShLanguage stage(EShLangCount); switch (storage) { @@ -831,6 +861,7 @@ void TDefaultGlslIoResolver::reserverStorageSlot(TVarEntryInfo& ent, TInfoSink& if (iter->second != location) { TString errorMsg = "Invalid location: " + name; infoSink.info.message(EPrefixInternalError, errorMsg.c_str()); + hasError = true; } } } @@ -856,6 +887,7 @@ void TDefaultGlslIoResolver::reserverStorageSlot(TVarEntryInfo& ent, TInfoSink& if (iter->second != location) { TString errorMsg = "Invalid location: " + name; infoSink.info.message(EPrefixInternalError, errorMsg.c_str()); + hasError = true; } } } @@ -867,7 +899,7 @@ void TDefaultGlslIoResolver::reserverStorageSlot(TVarEntryInfo& ent, TInfoSink& void TDefaultGlslIoResolver::reserverResourceSlot(TVarEntryInfo& ent, TInfoSink& infoSink) { const TType& type = ent.symbol->getType(); - const TString& name = ent.symbol->getName(); + const TString& name = getAccessName(ent.symbol); int resource = getResourceType(type); if (type.getQualifier().hasBinding()) { TVarSlotMap& varSlotMap = resourceSlotMap[resource]; @@ -884,11 +916,19 @@ void TDefaultGlslIoResolver::reserverResourceSlot(TVarEntryInfo& ent, TInfoSink& if (iter->second != binding) { TString errorMsg = "Invalid binding: " + name; infoSink.info.message(EPrefixInternalError, errorMsg.c_str()); + hasError = true; } } } } +const TString& TDefaultGlslIoResolver::getAccessName(const TIntermSymbol* symbol) +{ + return symbol->getBasicType() == EbtBlock ? + symbol->getType().getTypeName() : + symbol->getName(); +} + //TDefaultGlslIoResolver end /* @@ -1070,11 +1110,12 @@ bool TIoMapper::addStage(EShLanguage stage, TIntermediate& intermediate, TInfoSi TVarGatherTraverser iter_binding_live(intermediate, false, inVarMap, outVarMap, uniformVarMap); root->traverse(&iter_binding_all); iter_binding_live.pushFunction(intermediate.getEntryPointMangledName().c_str()); - while (! 
iter_binding_live.functions.empty()) { - TIntermNode* function = iter_binding_live.functions.back(); - iter_binding_live.functions.pop_back(); - function->traverse(&iter_binding_live); + while (! iter_binding_live.destinations.empty()) { + TIntermNode* destination = iter_binding_live.destinations.back(); + iter_binding_live.destinations.pop_back(); + destination->traverse(&iter_binding_live); } + // sort entries by priority. see TVarEntryInfo::TOrderByPriority for info. std::for_each(inVarMap.begin(), inVarMap.end(), [&inVector](TVarLivePair p) { inVector.push_back(p); }); @@ -1158,18 +1199,19 @@ bool TGlslIoMapper::addStage(EShLanguage stage, TIntermediate& intermediate, TIn resolver = &defaultResolver; } resolver->addStage(stage); - inVarMaps[stage] = new TVarLiveMap, outVarMaps[stage] = new TVarLiveMap(), uniformVarMap[stage] = new TVarLiveMap(); + inVarMaps[stage] = new TVarLiveMap(); outVarMaps[stage] = new TVarLiveMap(); uniformVarMap[stage] = new TVarLiveMap(); TVarGatherTraverser iter_binding_all(intermediate, true, *inVarMaps[stage], *outVarMaps[stage], *uniformVarMap[stage]); TVarGatherTraverser iter_binding_live(intermediate, false, *inVarMaps[stage], *outVarMaps[stage], *uniformVarMap[stage]); root->traverse(&iter_binding_all); iter_binding_live.pushFunction(intermediate.getEntryPointMangledName().c_str()); - while (! iter_binding_live.functions.empty()) { - TIntermNode* function = iter_binding_live.functions.back(); - iter_binding_live.functions.pop_back(); - function->traverse(&iter_binding_live); + while (! iter_binding_live.destinations.empty()) { + TIntermNode* destination = iter_binding_live.destinations.back(); + iter_binding_live.destinations.pop_back(); + destination->traverse(&iter_binding_live); } + TNotifyInOutAdaptor inOutNotify(stage, *resolver); TNotifyUniformAdaptor uniformNotify(stage, *resolver); // Resolve current stage input symbol location with previous stage output here, @@ -1246,4 +1288,4 @@ bool TGlslIoMapper::doMap(TIoMapResolver* resolver, TInfoSink& infoSink) { } // end namespace glslang -#endif // GLSLANG_WEB +#endif // !GLSLANG_WEB && !GLSLANG_ANGLE diff --git a/thirdparty/glslang/glslang/MachineIndependent/iomapper.h b/thirdparty/glslang/glslang/MachineIndependent/iomapper.h index 684e88d5710..674132786e4 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/iomapper.h +++ b/thirdparty/glslang/glslang/MachineIndependent/iomapper.h @@ -33,7 +33,7 @@ // POSSIBILITY OF SUCH DAMAGE. // -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) #ifndef _IOMAPPER_INCLUDED #define _IOMAPPER_INCLUDED @@ -129,6 +129,7 @@ public: uint32_t computeTypeLocationSize(const TType& type, EShLanguage stage); TSlotSetMap slots; + bool hasError = false; protected: TDefaultIoResolverBase(TDefaultIoResolverBase&); @@ -185,7 +186,7 @@ protected: } }; -// Defaulf I/O resolver for OpenGL +// Default I/O resolver for OpenGL struct TDefaultGlslIoResolver : public TDefaultIoResolverBase { public: typedef std::map TVarSlotMap; // @@ -202,6 +203,7 @@ public: void endCollect(EShLanguage) override; void reserverStorageSlot(TVarEntryInfo& ent, TInfoSink& infoSink) override; void reserverResourceSlot(TVarEntryInfo& ent, TInfoSink& infoSink) override; + const TString& getAccessName(const TIntermSymbol*); // in/out symbol and uniform symbol are stored in the same resourceSlotMap, the storage key is used to identify each type of symbol. // We use stage and storage qualifier to construct a storage key. 
it can help us identify the same storage resource used in different stage. // if a resource is a program resource and we don't need know it usage stage, we can use same stage to build storage key. @@ -237,12 +239,13 @@ typedef std::map TVarLiveMap; // In the future, if the vc++ compiler can handle such a situation, // this part of the code will be removed. struct TVarLivePair : std::pair { - TVarLivePair(std::pair& _Right) : pair(_Right.first, _Right.second) {} + TVarLivePair(const std::pair& _Right) : pair(_Right.first, _Right.second) {} TVarLivePair& operator=(const TVarLivePair& _Right) { const_cast(first) = _Right.first; second = _Right.second; return (*this); } + TVarLivePair(const TVarLivePair& src) : pair(src) { } }; typedef std::vector TVarLiveVector; @@ -260,10 +263,10 @@ public: class TGlslIoMapper : public TIoMapper { public: TGlslIoMapper() { - memset(inVarMaps, 0, sizeof(TVarLiveMap*) * (EShLangCount + 1)); - memset(outVarMaps, 0, sizeof(TVarLiveMap*) * (EShLangCount + 1)); - memset(uniformVarMap, 0, sizeof(TVarLiveMap*) * (EShLangCount + 1)); - memset(intermediates, 0, sizeof(TIntermediate*) * (EShLangCount + 1)); + memset(inVarMaps, 0, sizeof(TVarLiveMap*) * EShLangCount); + memset(outVarMaps, 0, sizeof(TVarLiveMap*) * EShLangCount); + memset(uniformVarMap, 0, sizeof(TVarLiveMap*) * EShLangCount); + memset(intermediates, 0, sizeof(TIntermediate*) * EShLangCount); } virtual ~TGlslIoMapper() { for (size_t stage = 0; stage < EShLangCount; stage++) { @@ -296,4 +299,4 @@ public: #endif // _IOMAPPER_INCLUDED -#endif // GLSLANG_WEB +#endif // !GLSLANG_WEB && !GLSLANG_ANGLE diff --git a/thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp b/thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp index fe51ec93ff1..a8e031bc6f6 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp @@ -82,7 +82,7 @@ void TIntermediate::warn(TInfoSink& infoSink, const char* message) // void TIntermediate::merge(TInfoSink& infoSink, TIntermediate& unit) { -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) mergeCallGraphs(infoSink, unit); mergeModes(infoSink, unit); mergeTrees(infoSink, unit); @@ -104,7 +104,7 @@ void TIntermediate::mergeCallGraphs(TInfoSink& infoSink, TIntermediate& unit) callGraph.insert(callGraph.end(), unit.callGraph.begin(), unit.callGraph.end()); } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) #define MERGE_MAX(member) member = std::max(member, unit.member) #define MERGE_TRUE(member) if (unit.member) member = unit.member; @@ -138,7 +138,11 @@ void TIntermediate::mergeModes(TInfoSink& infoSink, TIntermediate& unit) MERGE_MAX(spvVersion.openGl); numErrors += unit.getNumErrors(); - numPushConstants += unit.numPushConstants; + // Only one push_constant is allowed, mergeLinkerObjects() will ensure the push_constant + // is the same for all units. 
+ if (numPushConstants > 1 || unit.numPushConstants > 1) + error(infoSink, "Only one push_constant block is allowed per stage"); + numPushConstants = std::min(numPushConstants + unit.numPushConstants, 1); if (unit.invocations != TQualifier::layoutNotSet) { if (invocations == TQualifier::layoutNotSet) @@ -149,7 +153,7 @@ void TIntermediate::mergeModes(TInfoSink& infoSink, TIntermediate& unit) if (vertices == TQualifier::layoutNotSet) vertices = unit.vertices; - else if (vertices != unit.vertices) { + else if (unit.vertices != TQualifier::layoutNotSet && vertices != unit.vertices) { if (language == EShLangGeometry || language == EShLangMeshNV) error(infoSink, "Contradictory layout max_vertices values"); else if (language == EShLangTessControl) @@ -168,12 +172,12 @@ void TIntermediate::mergeModes(TInfoSink& infoSink, TIntermediate& unit) if (inputPrimitive == ElgNone) inputPrimitive = unit.inputPrimitive; - else if (inputPrimitive != unit.inputPrimitive) + else if (unit.inputPrimitive != ElgNone && inputPrimitive != unit.inputPrimitive) error(infoSink, "Contradictory input layout primitives"); if (outputPrimitive == ElgNone) outputPrimitive = unit.outputPrimitive; - else if (outputPrimitive != unit.outputPrimitive) + else if (unit.outputPrimitive != ElgNone && outputPrimitive != unit.outputPrimitive) error(infoSink, "Contradictory output layout primitives"); if (originUpperLeft != unit.originUpperLeft || pixelCenterInteger != unit.pixelCenterInteger) @@ -286,7 +290,7 @@ void TIntermediate::mergeTrees(TInfoSink& infoSink, TIntermediate& unit) } // Getting this far means we have two existing trees to merge... - numShaderRecordNVBlocks += unit.numShaderRecordNVBlocks; + numShaderRecordBlocks += unit.numShaderRecordBlocks; numTaskNVBlocks += unit.numTaskNVBlocks; // Get the top-level globals of each unit @@ -299,10 +303,10 @@ void TIntermediate::mergeTrees(TInfoSink& infoSink, TIntermediate& unit) // Map by global name to unique ID to rationalize the same object having // differing IDs in different trees. - TMap idMap; + TIdMaps idMaps; int maxId; - seedIdMap(idMap, maxId); - remapIds(idMap, maxId + 1, unit); + seedIdMap(idMaps, maxId); + remapIds(idMaps, maxId + 1, unit); mergeBodies(infoSink, globals, unitGlobals); mergeLinkerObjects(infoSink, linkerObjects, unitLinkerObjects); @@ -311,27 +315,40 @@ void TIntermediate::mergeTrees(TInfoSink& infoSink, TIntermediate& unit) #endif +static const TString& getNameForIdMap(TIntermSymbol* symbol) +{ + TShaderInterface si = symbol->getType().getShaderInterface(); + if (si == EsiNone) + return symbol->getName(); + else + return symbol->getType().getTypeName(); +} + + + // Traverser that seeds an ID map with all built-ins, and tracks the // maximum ID used. // (It would be nice to put this in a function, but that causes warnings // on having no bodies for the copy-constructor/operator=.) class TBuiltInIdTraverser : public TIntermTraverser { public: - TBuiltInIdTraverser(TMap& idMap) : idMap(idMap), maxId(0) { } + TBuiltInIdTraverser(TIdMaps& idMaps) : idMaps(idMaps), maxId(0) { } // If it's a built in, add it to the map. // Track the max ID. 
virtual void visitSymbol(TIntermSymbol* symbol) { const TQualifier& qualifier = symbol->getType().getQualifier(); - if (qualifier.builtIn != EbvNone) - idMap[symbol->getName()] = symbol->getId(); + if (qualifier.builtIn != EbvNone) { + TShaderInterface si = symbol->getType().getShaderInterface(); + idMaps[si][getNameForIdMap(symbol)] = symbol->getId(); + } maxId = std::max(maxId, symbol->getId()); } int getMaxId() const { return maxId; } protected: TBuiltInIdTraverser(TBuiltInIdTraverser&); TBuiltInIdTraverser& operator=(TBuiltInIdTraverser&); - TMap& idMap; + TIdMaps& idMaps; int maxId; }; @@ -340,31 +357,33 @@ protected: // on having no bodies for the copy-constructor/operator=.) class TUserIdTraverser : public TIntermTraverser { public: - TUserIdTraverser(TMap& idMap) : idMap(idMap) { } + TUserIdTraverser(TIdMaps& idMaps) : idMaps(idMaps) { } // If its a non-built-in global, add it to the map. virtual void visitSymbol(TIntermSymbol* symbol) { const TQualifier& qualifier = symbol->getType().getQualifier(); - if (qualifier.builtIn == EbvNone) - idMap[symbol->getName()] = symbol->getId(); + if (qualifier.builtIn == EbvNone) { + TShaderInterface si = symbol->getType().getShaderInterface(); + idMaps[si][getNameForIdMap(symbol)] = symbol->getId(); + } } protected: TUserIdTraverser(TUserIdTraverser&); TUserIdTraverser& operator=(TUserIdTraverser&); - TMap& idMap; // over biggest id + TIdMaps& idMaps; // over biggest id }; // Initialize the the ID map with what we know of 'this' AST. -void TIntermediate::seedIdMap(TMap& idMap, int& maxId) +void TIntermediate::seedIdMap(TIdMaps& idMaps, int& maxId) { // all built-ins everywhere need to align on IDs and contribute to the max ID - TBuiltInIdTraverser builtInIdTraverser(idMap); + TBuiltInIdTraverser builtInIdTraverser(idMaps); treeRoot->traverse(&builtInIdTraverser); maxId = builtInIdTraverser.getMaxId(); // user variables in the linker object list need to align on ids - TUserIdTraverser userIdTraverser(idMap); + TUserIdTraverser userIdTraverser(idMaps); findLinkerObjects()->traverse(&userIdTraverser); } @@ -373,7 +392,7 @@ void TIntermediate::seedIdMap(TMap& idMap, int& maxId) // on having no bodies for the copy-constructor/operator=.) class TRemapIdTraverser : public TIntermTraverser { public: - TRemapIdTraverser(const TMap& idMap, int idShift) : idMap(idMap), idShift(idShift) { } + TRemapIdTraverser(const TIdMaps& idMaps, int idShift) : idMaps(idMaps), idShift(idShift) { } // Do the mapping: // - if the same symbol, adopt the 'this' ID // - otherwise, ensure a unique ID by shifting to a new space @@ -382,8 +401,9 @@ public: const TQualifier& qualifier = symbol->getType().getQualifier(); bool remapped = false; if (qualifier.isLinkable() || qualifier.builtIn != EbvNone) { - auto it = idMap.find(symbol->getName()); - if (it != idMap.end()) { + TShaderInterface si = symbol->getType().getShaderInterface(); + auto it = idMaps[si].find(getNameForIdMap(symbol)); + if (it != idMaps[si].end()) { symbol->changeId(it->second); remapped = true; } @@ -394,14 +414,14 @@ public: protected: TRemapIdTraverser(TRemapIdTraverser&); TRemapIdTraverser& operator=(TRemapIdTraverser&); - const TMap& idMap; + const TIdMaps& idMaps; int idShift; }; -void TIntermediate::remapIds(const TMap& idMap, int idShift, TIntermediate& unit) +void TIntermediate::remapIds(const TIdMaps& idMaps, int idShift, TIntermediate& unit) { // Remap all IDs to either share or be unique, as dictated by the idMap and idShift. 
- TRemapIdTraverser idTraverser(idMap, idShift); + TRemapIdTraverser idTraverser(idMaps, idShift); unit.getTreeRoot()->traverse(&idTraverser); } @@ -443,7 +463,19 @@ void TIntermediate::mergeLinkerObjects(TInfoSink& infoSink, TIntermSequence& lin TIntermSymbol* symbol = linkerObjects[linkObj]->getAsSymbolNode(); TIntermSymbol* unitSymbol = unitLinkerObjects[unitLinkObj]->getAsSymbolNode(); assert(symbol && unitSymbol); - if (symbol->getName() == unitSymbol->getName()) { + + bool isSameSymbol = false; + // If they are both blocks in the same shader interface, + // match by the block-name, not the identifier name. + if (symbol->getType().getBasicType() == EbtBlock && unitSymbol->getType().getBasicType() == EbtBlock) { + if (symbol->getType().getShaderInterface() == unitSymbol->getType().getShaderInterface()) { + isSameSymbol = symbol->getType().getTypeName() == unitSymbol->getType().getTypeName(); + } + } + else if (symbol->getName() == unitSymbol->getName()) + isSameSymbol = true; + + if (isSameSymbol) { // filter out copy merge = false; @@ -462,6 +494,9 @@ void TIntermediate::mergeLinkerObjects(TInfoSink& infoSink, TIntermSequence& lin // Check for consistent types/qualification/initializers etc. mergeErrorCheck(infoSink, *symbol, *unitSymbol, false); } + // If different symbols, verify they arn't push_constant since there can only be one per stage + else if (symbol->getQualifier().isPushConstant() && unitSymbol->getQualifier().isPushConstant()) + error(infoSink, "Only one push_constant block is allowed per stage"); } if (merge) linkerObjects.push_back(unitLinkerObjects[unitLinkObj]); @@ -498,7 +533,7 @@ void TIntermediate::mergeImplicitArraySizes(TType& type, const TType& unitType) // void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& symbol, const TIntermSymbol& unitSymbol, bool crossStage) { -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) bool writeTypeComparison = false; // Types have to match @@ -520,6 +555,22 @@ void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& sy writeTypeComparison = true; } + // Uniform and buffer blocks must either both have an instance name, or + // must both be anonymous. The names don't need to match though. + if (symbol.getQualifier().isUniformOrBuffer() && + (IsAnonymous(symbol.getName()) != IsAnonymous(unitSymbol.getName()))) { + error(infoSink, "Matched Uniform or Storage blocks must all be anonymous," + " or all be named:"); + writeTypeComparison = true; + } + + if (symbol.getQualifier().storage == unitSymbol.getQualifier().storage && + (IsAnonymous(symbol.getName()) != IsAnonymous(unitSymbol.getName()) || + (!IsAnonymous(symbol.getName()) && symbol.getName() != unitSymbol.getName()))) { + warn(infoSink, "Matched shader interfaces are using different instance names."); + writeTypeComparison = true; + } + // Precision... 
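The block matching added above is the user-visible part of this change: interface blocks from different compilation units of a stage are now matched by their block name rather than by the instance name, and differing instance names only raise the new warning. A hypothetical pair of same-stage units that this matching is expected to accept, with the warning "Matched shader interfaces are using different instance names." reported for the uA/uB mismatch; all names are invented for illustration.

// Two compilation units of one stage declaring the same uniform block.
// They are matched by the block name "Params"; the differing instance
// names (uA vs. uB) are expected to warn instead of failing to match.
const char* unitA =
    "#version 450\n"
    "layout(set = 0, binding = 0) uniform Params { vec4 tint; } uA;\n"
    "vec4 tinted(vec4 c) { return c * uA.tint; }\n";
const char* unitB =
    "#version 450\n"
    "layout(set = 0, binding = 0) uniform Params { vec4 tint; } uB;\n"
    "void main() { }\n";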
if (symbol.getQualifier().precision != unitSymbol.getQualifier().precision) { error(infoSink, "Precision qualifiers must match:"); @@ -555,6 +606,7 @@ void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& sy symbol.getQualifier().queuefamilycoherent != unitSymbol.getQualifier().queuefamilycoherent || symbol.getQualifier().workgroupcoherent != unitSymbol.getQualifier().workgroupcoherent || symbol.getQualifier().subgroupcoherent != unitSymbol.getQualifier().subgroupcoherent || + symbol.getQualifier().shadercallcoherent!= unitSymbol.getQualifier().shadercallcoherent || symbol.getQualifier().nonprivate != unitSymbol.getQualifier().nonprivate || symbol.getQualifier().volatil != unitSymbol.getQualifier().volatil || symbol.getQualifier().restrict != unitSymbol.getQualifier().restrict || @@ -589,9 +641,13 @@ void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& sy } } - if (writeTypeComparison) - infoSink.info << " " << symbol.getName() << ": \"" << symbol.getType().getCompleteString() << "\" versus \"" << - unitSymbol.getType().getCompleteString() << "\"\n"; + if (writeTypeComparison) { + infoSink.info << " " << symbol.getName() << ": \"" << symbol.getType().getCompleteString() << "\" versus "; + if (symbol.getName() != unitSymbol.getName()) + infoSink.info << unitSymbol.getName() << ": "; + + infoSink.info << "\"" << unitSymbol.getType().getCompleteString() << "\"\n"; + } #endif } @@ -721,13 +777,13 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled) break; case EShLangCompute: break; - case EShLangRayGenNV: - case EShLangIntersectNV: - case EShLangAnyHitNV: - case EShLangClosestHitNV: - case EShLangMissNV: - case EShLangCallableNV: - if (numShaderRecordNVBlocks > 1) + case EShLangRayGen: + case EShLangIntersect: + case EShLangAnyHit: + case EShLangClosestHit: + case EShLangMiss: + case EShLangCallable: + if (numShaderRecordBlocks > 1) error(infoSink, "Only one shaderRecordNV buffer block is allowed per stage"); break; case EShLangMeshNV: @@ -1306,9 +1362,9 @@ unsigned int TIntermediate::computeTypeXfbSize(const TType& type, bool& contains // that component's size. Aggregate types are flattened down to the component // level to get this sequence of components." - if (type.isArray()) { + if (type.isSizedArray()) { // TODO: perf: this can be flattened by using getCumulativeArraySize(), and a deref that discards all arrayness - assert(type.isSizedArray()); + // Unsized array use to xfb should be a compile error. TType elementType(type, 0); return type.getOuterArraySize() * computeTypeXfbSize(elementType, contains64BitType, contains16BitType, contains16BitType); } @@ -1494,7 +1550,9 @@ int TIntermediate::getBaseAlignment(const TType& type, int& size, int& stride, T RoundToPow2(size, alignment); stride = size; // uses full matrix size for stride of an array of matrices (not quite what rule 6/8, but what's expected) // uses the assumption for rule 10 in the comment above - size = stride * type.getOuterArraySize(); + // use one element to represent the last member of SSBO which is unsized array + int arraySize = (type.isUnsizedArray() && (type.getOuterArraySize() == 0)) ? 
1 : type.getOuterArraySize(); + size = stride * arraySize; return alignment; } diff --git a/thirdparty/glslang/glslang/MachineIndependent/localintermediate.h b/thirdparty/glslang/glslang/MachineIndependent/localintermediate.h index 683290af74f..3cdc1f10cf3 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/localintermediate.h +++ b/thirdparty/glslang/glslang/MachineIndependent/localintermediate.h @@ -162,7 +162,10 @@ struct TXfbBuffer { #endif // Track a set of strings describing how the module was processed. -// Using the form: +// This includes command line options, transforms, etc., ideally inclusive enough +// to reproduce the steps used to transform the input source to the output. +// E.g., see SPIR-V OpModuleProcessed. +// Each "process" or "transform" uses is expressed in the form: // process arg0 arg1 arg2 ... // process arg0 arg1 arg2 ... // where everything is textual, and there can be zero or more arguments @@ -222,6 +225,40 @@ enum ComputeDerivativeMode { LayoutDerivativeGroupLinear, // derivative_group_linearNV }; +class TIdMaps { +public: + TMap& operator[](int i) { return maps[i]; } + const TMap& operator[](int i) const { return maps[i]; } +private: + TMap maps[EsiCount]; +}; + +class TNumericFeatures { +public: + TNumericFeatures() : features(0) { } + TNumericFeatures(const TNumericFeatures&) = delete; + TNumericFeatures& operator=(const TNumericFeatures&) = delete; + typedef enum : unsigned int { + shader_explicit_arithmetic_types = 1 << 0, + shader_explicit_arithmetic_types_int8 = 1 << 1, + shader_explicit_arithmetic_types_int16 = 1 << 2, + shader_explicit_arithmetic_types_int32 = 1 << 3, + shader_explicit_arithmetic_types_int64 = 1 << 4, + shader_explicit_arithmetic_types_float16 = 1 << 5, + shader_explicit_arithmetic_types_float32 = 1 << 6, + shader_explicit_arithmetic_types_float64 = 1 << 7, + shader_implicit_conversions = 1 << 8, + gpu_shader_fp64 = 1 << 9, + gpu_shader_int16 = 1 << 10, + gpu_shader_half_float = 1 << 11, + } feature; + void insert(feature f) { features |= f; } + void erase(feature f) { features &= ~f; } + bool contains(feature f) const { return (features & f) != 0; } +private: + unsigned int features; +}; + // // Set of helper functions to help parse and build the tree. 
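The arraySize fallback at the start of this hunk handles the common SSBO pattern where the last member is a runtime-sized array: such an array reports an outer size of 0, so the old size = stride * getOuterArraySize() made it contribute nothing to the block size. A tiny sketch of the rule as changed; this is a simplification for illustration, not the real layout code, and the example block is hypothetical.

// Count a runtime-sized (unsized) array as one element so the enclosing
// block still gets a usable size, mirroring "size = stride * arraySize" above.
int elementCountForSizing(bool isUnsizedArray, int outerArraySize)
{
    return (isUnsizedArray && outerArraySize == 0) ? 1 : outerArraySize;
}

// e.g. for a block like:  buffer Data { uint count; float values[]; };
// 'values' now contributes stride * 1 instead of stride * 0 to the size.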
// @@ -229,7 +266,10 @@ class TIntermediate { public: explicit TIntermediate(EShLanguage l, int v = 0, EProfile p = ENoProfile) : language(l), - profile(p), version(v), treeRoot(0), +#ifndef GLSLANG_ANGLE + profile(p), version(v), +#endif + treeRoot(0), numEntryPoints(0), numErrors(0), numPushConstants(0), recursive(false), invertY(false), useStorageBuffer(false), @@ -244,15 +284,16 @@ public: inputPrimitive(ElgNone), outputPrimitive(ElgNone), pixelCenterInteger(false), originUpperLeft(false), vertexSpacing(EvsNone), vertexOrder(EvoNone), interlockOrdering(EioNone), pointMode(false), earlyFragmentTests(false), - postDepthCoverage(false), depthLayout(EldNone), + postDepthCoverage(false), depthLayout(EldNone), hlslFunctionality1(false), blendEquations(0), xfbMode(false), multiStream(false), layoutOverrideCoverage(false), geoPassthroughEXT(false), - numShaderRecordNVBlocks(0), + numShaderRecordBlocks(0), computeDerivativeMode(LayoutDerivativeNone), primitives(TQualifier::layoutNotSet), numTaskNVBlocks(0), + layoutPrimitiveCulling(false), autoMapBindings(false), autoMapLocations(false), flattenUniformArrays(false), @@ -282,9 +323,20 @@ public: #endif } - void setVersion(int v) { version = v; } + void setVersion(int v) + { +#ifndef GLSLANG_ANGLE + version = v; +#endif + } + void setProfile(EProfile p) + { +#ifndef GLSLANG_ANGLE + profile = p; +#endif + } + int getVersion() const { return version; } - void setProfile(EProfile p) { profile = p; } EProfile getProfile() const { return profile; } void setSpv(const SpvVersion& s) { @@ -380,7 +432,7 @@ public: void setSource(EShSource s) { source = s; } EShSource getSource() const { return source; } #else - void setSource(EShSource s) { assert(s == EShSourceGlsl); } + void setSource(EShSource s) { assert(s == EShSourceGlsl); (void)s; } EShSource getSource() const { return EShSourceGlsl; } #endif @@ -391,15 +443,15 @@ public: TIntermSymbol* addSymbol(const TType&, const TSourceLoc&); TIntermSymbol* addSymbol(const TIntermSymbol&); TIntermTyped* addConversion(TOperator, const TType&, TIntermTyped*); - std::tuple addConversion(TOperator op, TIntermTyped* node0, TIntermTyped* node1); + std::tuple addPairConversion(TOperator op, TIntermTyped* node0, TIntermTyped* node1); TIntermTyped* addUniShapeConversion(TOperator, const TType&, TIntermTyped*); TIntermTyped* addConversion(TBasicType convertTo, TIntermTyped* node) const; void addBiShapeConversion(TOperator, TIntermTyped*& lhsNode, TIntermTyped*& rhsNode); TIntermTyped* addShapeConversion(const TType&, TIntermTyped*); - TIntermTyped* addBinaryMath(TOperator, TIntermTyped* left, TIntermTyped* right, TSourceLoc); - TIntermTyped* addAssign(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc); - TIntermTyped* addIndex(TOperator op, TIntermTyped* base, TIntermTyped* index, TSourceLoc); - TIntermTyped* addUnaryMath(TOperator, TIntermTyped* child, TSourceLoc); + TIntermTyped* addBinaryMath(TOperator, TIntermTyped* left, TIntermTyped* right, const TSourceLoc&); + TIntermTyped* addAssign(TOperator op, TIntermTyped* left, TIntermTyped* right, const TSourceLoc&); + TIntermTyped* addIndex(TOperator op, TIntermTyped* base, TIntermTyped* index, const TSourceLoc&); + TIntermTyped* addUnaryMath(TOperator, TIntermTyped* child, const TSourceLoc&); TIntermTyped* addBuiltInFunctionCall(const TSourceLoc& line, TOperator, bool unary, TIntermNode*, const TType& returnType); bool canImplicitlyPromote(TBasicType from, TBasicType to, TOperator op = EOpNull) const; bool isIntegralPromotion(TBasicType from, TBasicType 
to) const; @@ -413,7 +465,7 @@ public: TIntermAggregate* makeAggregate(TIntermNode* node); TIntermAggregate* makeAggregate(TIntermNode* node, const TSourceLoc&); TIntermAggregate* makeAggregate(const TSourceLoc&); - TIntermTyped* setAggregateOperator(TIntermNode*, TOperator, const TType& type, TSourceLoc); + TIntermTyped* setAggregateOperator(TIntermNode*, TOperator, const TType& type, const TSourceLoc&); bool areAllChildConst(TIntermAggregate* aggrNode); TIntermSelection* addSelection(TIntermTyped* cond, TIntermNodePair code, const TSourceLoc&); TIntermTyped* addSelection(TIntermTyped* cond, TIntermTyped* trueBlock, TIntermTyped* falseBlock, const TSourceLoc&); @@ -442,10 +494,11 @@ public: // Low level functions to add nodes (no conversions or other higher level transformations) // If a type is provided, the node's type will be set to it. - TIntermBinary* addBinaryNode(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc) const; - TIntermBinary* addBinaryNode(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc, const TType&) const; - TIntermUnary* addUnaryNode(TOperator op, TIntermTyped* child, TSourceLoc) const; - TIntermUnary* addUnaryNode(TOperator op, TIntermTyped* child, TSourceLoc, const TType&) const; + TIntermBinary* addBinaryNode(TOperator op, TIntermTyped* left, TIntermTyped* right, const TSourceLoc&) const; + TIntermBinary* addBinaryNode(TOperator op, TIntermTyped* left, TIntermTyped* right, const TSourceLoc&, + const TType&) const; + TIntermUnary* addUnaryNode(TOperator op, TIntermTyped* child, const TSourceLoc&) const; + TIntermUnary* addUnaryNode(TOperator op, TIntermTyped* child, const TSourceLoc&, const TType&) const; // Constant folding (in Constant.cpp) TIntermTyped* fold(TIntermAggregate* aggrNode); @@ -460,11 +513,7 @@ public: void addSymbolLinkageNodes(TIntermAggregate*& linkage, EShLanguage, TSymbolTable&); void addSymbolLinkageNode(TIntermAggregate*& linkage, const TSymbol&); - void setUseStorageBuffer() - { - useStorageBuffer = true; - processes.addProcess("use-storage-buffer"); - } + void setUseStorageBuffer() { useStorageBuffer = true; } bool usingStorageBuffer() const { return useStorageBuffer; } void setDepthReplacing() { depthReplacing = true; } bool isDepthReplacing() const { return depthReplacing; } @@ -503,7 +552,7 @@ public: bool getAutoMapBindings() const { return false; } bool getAutoMapLocations() const { return false; } int getNumPushConstants() const { return 0; } - void addShaderRecordNVCount() { } + void addShaderRecordCount() { } void addTaskNVCount() { } void setUseVulkanMemoryModel() { } bool usingVulkanMemoryModel() const { return false; } @@ -583,7 +632,7 @@ public: processes.addProcess("flatten-uniform-arrays"); } bool getFlattenUniformArrays() const { return flattenUniformArrays; } -#endif +#endif void setNoStorageFormat(bool b) { useUnknownFormat = b; @@ -620,7 +669,7 @@ public: void setTextureSamplerTransformMode(EShTextureSamplerTransformMode mode) { textureSamplerTransformMode = mode; } int getNumPushConstants() const { return numPushConstants; } - void addShaderRecordNVCount() { ++numShaderRecordNVBlocks; } + void addShaderRecordCount() { ++numShaderRecordBlocks; } void addTaskNVCount() { ++numTaskNVBlocks; } bool setInvocations(int i) @@ -723,6 +772,8 @@ public: void setLayoutDerivativeMode(ComputeDerivativeMode mode) { computeDerivativeMode = mode; } bool hasLayoutDerivativeModeNone() const { return computeDerivativeMode != LayoutDerivativeNone; } ComputeDerivativeMode getLayoutDerivativeModeNone() const { 
return computeDerivativeMode; } + void setLayoutPrimitiveCulling() { layoutPrimitiveCulling = true; } + bool getLayoutPrimitiveCulling() const { return layoutPrimitiveCulling; } bool setPrimitives(int m) { if (primitives != TQualifier::layoutNotSet) @@ -834,22 +885,25 @@ public: bool getArithemeticInt8Enabled() const { return false; } bool getArithemeticInt16Enabled() const { return false; } bool getArithemeticFloat16Enabled() const { return false; } + void updateNumericFeature(TNumericFeatures::feature f, bool on) { } #else bool getArithemeticInt8Enabled() const { - return extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int8); + return numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_int8); } bool getArithemeticInt16Enabled() const { - return extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) || - extensionRequested(E_GL_AMD_gpu_shader_int16) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int16); + return numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types) || + numericFeatures.contains(TNumericFeatures::gpu_shader_int16) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_int16); } bool getArithemeticFloat16Enabled() const { - return extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) || - extensionRequested(E_GL_AMD_gpu_shader_half_float) || - extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_float16); + return numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types) || + numericFeatures.contains(TNumericFeatures::gpu_shader_half_float) || + numericFeatures.contains(TNumericFeatures::shader_explicit_arithmetic_types_float16); } + void updateNumericFeature(TNumericFeatures::feature f, bool on) + { on ? numericFeatures.insert(f) : numericFeatures.erase(f); } #endif protected: @@ -859,8 +913,8 @@ protected: void mergeCallGraphs(TInfoSink&, TIntermediate&); void mergeModes(TInfoSink&, TIntermediate&); void mergeTrees(TInfoSink&, TIntermediate&); - void seedIdMap(TMap& idMap, int& maxId); - void remapIds(const TMap& idMap, int idShift, TIntermediate&); + void seedIdMap(TIdMaps& idMaps, int& maxId); + void remapIds(const TIdMaps& idMaps, int idShift, TIntermediate&); void mergeBodies(TInfoSink&, TIntermSequence& globals, const TIntermSequence& unitGlobals); void mergeLinkerObjects(TInfoSink&, TIntermSequence& linkerObjects, const TIntermSequence& unitLinkerObjects); void mergeImplicitArraySizes(TType&, const TType&); @@ -881,17 +935,7 @@ protected: bool specConstantPropagates(const TIntermTyped&, const TIntermTyped&); void performTextureUpgradeAndSamplerRemovalTransformation(TIntermNode* root); bool isConversionAllowed(TOperator op, TIntermTyped* node) const; - std::tuple getConversionDestinatonType(TBasicType type0, TBasicType type1, TOperator op) const; - - // JohnK: I think this function should go away. - // This data structure is just a log to pass on to back ends. - // Versioning and extensions are handled in Version.cpp, with a rich - // set of functions for querying stages, versions, extension enable/disabled, etc. 
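The arithmetic-type getters above now test bits in the TNumericFeatures set instead of calling the removed extensionRequested() string lookup on every query. A self-contained stand-in showing the same bit-set pattern; the bit values copy the ones defined in this header, while the class and variable names are illustrative only.

#include <cassert>

// Minimal stand-in for the TNumericFeatures pattern: one bit per numeric
// feature, set when the corresponding extension is enabled.
enum Feature : unsigned int {
    explicit_types         = 1u << 0,
    explicit_types_float16 = 1u << 5,
    gpu_shader_half_float  = 1u << 11,
};

struct FeatureSet {
    unsigned int bits = 0;
    void insert(Feature f)         { bits |= f; }
    void erase(Feature f)          { bits &= ~f; }
    bool contains(Feature f) const { return (bits & f) != 0; }
};

int main()
{
    FeatureSet features;
    features.insert(gpu_shader_half_float);   // e.g. the extension was enabled
    // float16 arithmetic is allowed if any covering bit is set, mirroring
    // the getArithemeticFloat16Enabled() logic above.
    assert(features.contains(explicit_types) ||
           features.contains(gpu_shader_half_float) ||
           features.contains(explicit_types_float16));
    return 0;
}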
-#ifdef GLSLANG_WEB - bool extensionRequested(const char *extension) const { return false; } -#else - bool extensionRequested(const char *extension) const {return requestedExtensions.find(extension) != requestedExtensions.end();} -#endif + std::tuple getConversionDestinationType(TBasicType type0, TBasicType type1, TOperator op) const; static const char* getResourceName(TResourceType); @@ -901,8 +945,13 @@ protected: typedef std::list TGraph; TGraph callGraph; +#ifdef GLSLANG_ANGLE + const EProfile profile = ECoreProfile; + const int version = 450; +#else EProfile profile; // source profile int version; // source version +#endif SpvVersion spvVersion; TIntermNode* treeRoot; std::set requestedExtensions; // cumulation of all enabled or required extensions; not connected to what subset of the shader used them @@ -945,10 +994,11 @@ protected: bool multiStream; bool layoutOverrideCoverage; bool geoPassthroughEXT; - int numShaderRecordNVBlocks; + int numShaderRecordBlocks; ComputeDerivativeMode computeDerivativeMode; int primitives; int numTaskNVBlocks; + bool layoutPrimitiveCulling; // Base shift values std::array shiftBinding; @@ -975,6 +1025,7 @@ protected: std::unordered_map uniformLocationOverrides; int uniformLocationBase; + TNumericFeatures numericFeatures; #endif std::unordered_set usedConstantId; // specialization constant ids used diff --git a/thirdparty/glslang/glslang/MachineIndependent/parseConst.cpp b/thirdparty/glslang/glslang/MachineIndependent/parseConst.cpp index 1a8e6d9987c..7c04743ba68 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/parseConst.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/parseConst.cpp @@ -165,17 +165,27 @@ void TConstTraverser::visitConstantUnion(TIntermConstantUnion* node) } } } else { - // matrix from vector + // matrix from vector or scalar int count = 0; const int startIndex = index; int nodeComps = node->getType().computeNumComponents(); for (int i = startIndex; i < endIndex; i++) { if (i >= instanceSize) return; - if (i == startIndex || (i - startIndex) % (matrixRows + 1) == 0 ) + if (nodeComps == 1) { + // If there is a single scalar parameter to a matrix + // constructor, it is used to initialize all the + // components on the matrix's diagonal, with the + // remaining components initialized to 0.0. 
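The comment closing this hunk describes the GLSL rule that a matrix constructed from a single scalar, e.g. mat3(2.0), gets that scalar on its diagonal and 0.0 everywhere else; in the flattened column-major storage the diagonal is exactly the indices where i % (rows + 1) == 0, which is what the new branch tests. A few standalone lines checking the index math (plain C++ for illustration, not glslang code):

#include <array>
#include <cstdio>

int main()
{
    const int rows = 3, cols = 3;
    const double s = 2.0;                     // the single scalar constructor argument
    std::array<double, rows * cols> m{};
    // Column-major mat3: the diagonal lands on flattened indices 0, 4, 8.
    for (int i = 0; i < rows * cols; ++i)
        m[i] = (i % (rows + 1) == 0) ? s : 0.0;
    for (int i = 0; i < rows * cols; ++i)
        std::printf("%g%c", m[i], (i % rows == rows - 1) ? '\n' : ' ');
    return 0;
}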
+ if (i == startIndex || (i - startIndex) % (matrixRows + 1) == 0 ) + leftUnionArray[i] = rightUnionArray[count]; + else + leftUnionArray[i].setDConst(0.0); + } else { + // construct the matrix in column-major order, from + // the components provided, in order leftUnionArray[i] = rightUnionArray[count]; - else - leftUnionArray[i].setDConst(0.0); + } index++; diff --git a/thirdparty/glslang/glslang/MachineIndependent/parseVersions.h b/thirdparty/glslang/glslang/MachineIndependent/parseVersions.h index aa1964fc2e5..7248354e4b6 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/parseVersions.h +++ b/thirdparty/glslang/glslang/MachineIndependent/parseVersions.h @@ -58,7 +58,7 @@ public: const SpvVersion& spvVersion, EShLanguage language, TInfoSink& infoSink, bool forwardCompatible, EShMessages messages) : -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) forwardCompatible(forwardCompatible), profile(profile), #endif @@ -101,6 +101,7 @@ public: void updateExtensionBehavior(int line, const char* const extension, const char* behavior) { } void updateExtensionBehavior(const char* const extension, TExtensionBehavior) { } void checkExtensionStage(const TSourceLoc&, const char* const extension) { } + void extensionRequires(const TSourceLoc&, const char* const extension, const char* behavior) { } void fullIntegerCheck(const TSourceLoc&, const char* op) { } void doubleCheck(const TSourceLoc&, const char* op) { } bool float16Arithmetic() { return false; } @@ -115,9 +116,14 @@ public: bool relaxedErrors() const { return false; } bool suppressWarnings() const { return true; } bool isForwardCompatible() const { return false; } +#else +#ifdef GLSLANG_ANGLE + const bool forwardCompatible = true; + const EProfile profile = ECoreProfile; #else bool forwardCompatible; // true if errors are to be given for use of deprecated features EProfile profile; // the declared profile in the shader (core by default) +#endif bool isEsProfile() const { return profile == EEsProfile; } void requireProfile(const TSourceLoc& loc, int profileMask, const char* featureDesc); void profileRequires(const TSourceLoc& loc, int profileMask, int minVersion, int numExtensions, @@ -139,6 +145,7 @@ public: virtual bool checkExtensionsRequested(const TSourceLoc&, int numExtensions, const char* const extensions[], const char* featureDesc); virtual void checkExtensionStage(const TSourceLoc&, const char* const extension); + virtual void extensionRequires(const TSourceLoc&, const char* const extension, const char* behavior); virtual void fullIntegerCheck(const TSourceLoc&, const char* op); virtual void unimplemented(const TSourceLoc&, const char* featureDesc); @@ -170,6 +177,7 @@ public: virtual void vulkanRemoved(const TSourceLoc&, const char* op); virtual void requireVulkan(const TSourceLoc&, const char* op); virtual void requireSpv(const TSourceLoc&, const char* op); + virtual void requireSpv(const TSourceLoc&, const char *op, unsigned int version); #if defined(GLSLANG_WEB) && !defined(GLSLANG_WEB_DEVEL) @@ -221,7 +229,8 @@ public: TIntermediate& intermediate; // helper for making and hooking up pieces of the parse tree protected: - TMap extensionBehavior; // for each extension string, what its current behavior is set to + TMap extensionBehavior; // for each extension string, what its current behavior is + TMap extensionMinSpv; // for each extension string, store minimum spirv required EShMessages messages; // errors/warnings/rule-sets int numErrors; // number of compile-time errors encountered TInputScanner* 
currentScanner; diff --git a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/Pp.cpp b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/Pp.cpp old mode 100755 new mode 100644 index d7ff485c0aa..a0a626f9b71 --- a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/Pp.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/Pp.cpp @@ -422,10 +422,10 @@ int TPpContext::eval(int token, int precedence, bool shortCircuit, int& res, boo if (! parseContext.isReadingHLSL() && isMacroInput()) { if (parseContext.relaxedErrors()) parseContext.ppWarn(ppToken->loc, "nonportable when expanded from macros for preprocessor expression", - "defined", ""); + "defined", ""); else parseContext.ppError(ppToken->loc, "cannot use in preprocessor expression when expanded from macros", - "defined", ""); + "defined", ""); } bool needclose = 0; token = scanToken(ppToken); @@ -621,14 +621,25 @@ int TPpContext::CPPinclude(TPpToken* ppToken) { const TSourceLoc directiveLoc = ppToken->loc; bool startWithLocalSearch = true; // to additionally include the extra "" paths - int token = scanToken(ppToken); + int token; - // handle -style #include - if (token == '<') { + // Find the first non-whitespace char after #include + int ch = getChar(); + while (ch == ' ' || ch == '\t') { + ch = getChar(); + } + if (ch == '<') { + // style startWithLocalSearch = false; token = scanHeaderName(ppToken, '>'); + } else if (ch == '"') { + // "header-name" style + token = scanHeaderName(ppToken, '"'); + } else { + // unexpected, get the full token to generate the error + ungetChar(); + token = scanToken(ppToken); } - // otherwise ppToken already has the header name and it was "header-name" style if (token != PpAtomConstString) { parseContext.ppError(directiveLoc, "must be followed by a header name", "#include", ""); @@ -711,7 +722,9 @@ int TPpContext::CPPline(TPpToken* ppToken) const char* sourceName = nullptr; // Optional source file name. bool lineErr = false; bool fileErr = false; + disableEscapeSequences = true; token = eval(token, MIN_PRECEDENCE, false, lineRes, lineErr, ppToken); + disableEscapeSequences = false; if (! lineErr) { lineToken = lineRes; if (token == '\n') @@ -754,7 +767,9 @@ int TPpContext::CPPline(TPpToken* ppToken) // Handle #error int TPpContext::CPPerror(TPpToken* ppToken) { + disableEscapeSequences = true; int token = scanToken(ppToken); + disableEscapeSequences = false; std::string message; TSourceLoc loc = ppToken->loc; @@ -1169,7 +1184,9 @@ MacroExpandResult TPpContext::MacroExpand(TPpToken* ppToken, bool expandUndef, b int macroAtom = atomStrings.getAtom(ppToken->name); switch (macroAtom) { case PpAtomLineMacro: - ppToken->ival = parseContext.getCurrentLoc().line; + // Arguments which are macro have been replaced in the first stage. + if (ppToken->ival == 0) + ppToken->ival = parseContext.getCurrentLoc().line; snprintf(ppToken->name, sizeof(ppToken->name), "%d", ppToken->ival); UngetToken(PpAtomConstInt, ppToken); return MacroExpandStarted; @@ -1270,6 +1287,11 @@ MacroExpandResult TPpContext::MacroExpand(TPpToken* ppToken, bool expandUndef, b nestStack.push_back('}'); else if (nestStack.size() > 0 && token == nestStack.back()) nestStack.pop_back(); + + //Macro replacement list is expanded in the last stage. 
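The two __LINE__ hunks above change when the preprocessor samples the current line: the value is now captured at the point a __LINE__ token is recorded as a macro argument, so it reflects the invocation site rather than wherever the argument is finally expanded. A small illustration as shader text; the snippet and the expected values in the comments are hypothetical and describe the intended behaviour, not a test from this patch.

// Hypothetical shader text:
const char* src =
    "#version 450\n"                    // line 1
    "#define PASS(x) x\n"               // line 2
    "const int a = __LINE__;\n"         // line 3 -> a expected to be 3
    "const int b = PASS(__LINE__);\n"   // line 4 -> b expected to be 4 (the use site)
    "void main() { }\n";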
+ if (atomStrings.getAtom(ppToken->name) == PpAtomLineMacro) + ppToken->ival = parseContext.getCurrentLoc().line; + in->args[arg]->putToken(token, ppToken); tokenRecorded = true; } diff --git a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.cpp b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.cpp old mode 100755 new mode 100644 index cc003a8d12b..1363ce2be04 --- a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.cpp @@ -87,7 +87,8 @@ namespace glslang { TPpContext::TPpContext(TParseContextBase& pc, const std::string& rootFileName, TShader::Includer& inclr) : preamble(0), strings(0), previous_token('\n'), parseContext(pc), includer(inclr), inComment(false), rootFileName(rootFileName), - currentSourceFile(rootFileName) + currentSourceFile(rootFileName), + disableEscapeSequences(false) { ifdepth = 0; for (elsetracker = 0; elsetracker < maxIfNesting; elsetracker++) diff --git a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.h b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.h index 8470e172a22..714b5eadba4 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.h +++ b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.h @@ -105,13 +105,13 @@ public: } // Used for comparing macro definitions, so checks what is relevant for that. - bool operator==(const TPpToken& right) + bool operator==(const TPpToken& right) const { return space == right.space && ival == right.ival && dval == right.dval && i64val == right.i64val && strncmp(name, right.name, MaxTokenLength) == 0; } - bool operator!=(const TPpToken& right) { return ! operator==(right); } + bool operator!=(const TPpToken& right) const { return ! operator==(right); } TSourceLoc loc; // True if a space (for white space or a removed comment) should also be @@ -695,6 +695,7 @@ protected: std::string currentSourceFile; std::istringstream strtodStream; + bool disableEscapeSequences; }; } // end namespace glslang diff --git a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpScanner.cpp b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpScanner.cpp old mode 100755 new mode 100644 index c293af3c1ea..e0f44f8b4f1 --- a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpScanner.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpScanner.cpp @@ -1026,12 +1026,80 @@ int TPpContext::tStringInput::scan(TPpToken* ppToken) case '\'': return pp->characterLiteral(ppToken); case '"': - // TODO: If this gets enhanced to handle escape sequences, or - // anything that is different than what #include needs, then - // #include needs to use scanHeaderName() for this. + // #include uses scanHeaderName() to ignore these escape sequences. ch = getch(); while (ch != '"' && ch != '\n' && ch != EndOfInput) { if (len < MaxTokenLength) { + if (ch == '\\' && !pp->disableEscapeSequences) { + int nextCh = getch(); + switch (nextCh) { + case '\'': ch = 0x27; break; + case '"': ch = 0x22; break; + case '?': ch = 0x3f; break; + case '\\': ch = 0x5c; break; + case 'a': ch = 0x07; break; + case 'b': ch = 0x08; break; + case 'f': ch = 0x0c; break; + case 'n': ch = 0x0a; break; + case 'r': ch = 0x0d; break; + case 't': ch = 0x09; break; + case 'v': ch = 0x0b; break; + case 'x': + // Hex value, arbitrary number of characters. 
Terminated by the first + // non-hex digit + { + int numDigits = 0; + ch = 0; + while (true) { + nextCh = getch(); + if (nextCh >= '0' && nextCh <= '9') + nextCh -= '0'; + else if (nextCh >= 'A' && nextCh <= 'F') + nextCh -= 'A' - 10; + else if (nextCh >= 'a' && nextCh <= 'f') + nextCh -= 'a' - 10; + else { + ungetch(); + break; + } + numDigits++; + ch = ch * 0x10 + nextCh; + } + if (numDigits == 0) { + pp->parseContext.ppError(ppToken->loc, "Expected hex value in escape sequence", "string", ""); + } + break; + } + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + // Octal value, up to three octal digits + { + int numDigits = 1; + ch = nextCh - '0'; + while (numDigits < 3) { + nextCh = getch(); + if (nextCh >= '0' && nextCh <= '7') + nextCh -= '0'; + else { + ungetch(); + break; + } + numDigits++; + ch = ch * 8 + nextCh; + } + break; + } + default: + pp->parseContext.ppError(ppToken->loc, "Invalid escape sequence", "string", ""); + break; + } + } ppToken->name[len] = (char)ch; len++; ch = getch(); @@ -1120,10 +1188,12 @@ int TPpContext::tokenize(TPpToken& ppToken) continue; break; case PpAtomConstString: + // HLSL allows string literals. + // GLSL allows string literals with GL_EXT_debug_printf. if (ifdepth == 0 && parseContext.intermediate.getSource() != EShSourceHlsl) { - // HLSL allows string literals. - parseContext.ppError(ppToken.loc, "string literals not supported", "\"\"", ""); - continue; + parseContext.requireExtensions(ppToken.loc, 1, &E_GL_EXT_debug_printf, "string literal"); + if (!parseContext.extensionTurnedOn(E_GL_EXT_debug_printf)) + continue; } break; case '\'': diff --git a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpTokens.cpp b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpTokens.cpp old mode 100755 new mode 100644 diff --git a/thirdparty/glslang/glslang/MachineIndependent/propagateNoContraction.cpp b/thirdparty/glslang/glslang/MachineIndependent/propagateNoContraction.cpp index 83a3230f515..9def592bafa 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/propagateNoContraction.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/propagateNoContraction.cpp @@ -867,4 +867,4 @@ void PropagateNoContraction(const glslang::TIntermediate& intermediate) } }; -#endif // GLSLANG_WEB \ No newline at end of file +#endif // GLSLANG_WEB diff --git a/thirdparty/glslang/glslang/MachineIndependent/reflection.cpp b/thirdparty/glslang/glslang/MachineIndependent/reflection.cpp index b09367113cf..0aabf37fba3 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/reflection.cpp +++ b/thirdparty/glslang/glslang/MachineIndependent/reflection.cpp @@ -33,7 +33,7 @@ // POSSIBILITY OF SUCH DAMAGE. // -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) #include "../Include/Common.h" #include "reflection.h" @@ -77,10 +77,10 @@ namespace glslang { // This is in the glslang namespace directly so it can be a friend of TReflection. 
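Two related scanner changes sit above: string literals in GLSL are now accepted when GL_EXT_debug_printf is enabled (they remain an error otherwise), and, outside of #include/#line/#error, escape sequences such as \n or \x41 inside string literals are decoded rather than passed through. A hypothetical shader fragment that relies on both; debugPrintfEXT() comes with the GL_EXT_debug_printf extension.

// Hypothetical shader text: the string literal is accepted once the
// extension is enabled, and the "\n" in the format string is decoded
// by the scanner change above.
const char* src =
    "#version 450\n"
    "#extension GL_EXT_debug_printf : enable\n"
    "void main() {\n"
    "    debugPrintfEXT(\"value = %f\\n\", 1.0);\n"
    "}\n";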
// -class TReflectionTraverser : public TLiveTraverser { +class TReflectionTraverser : public TIntermTraverser { public: TReflectionTraverser(const TIntermediate& i, TReflection& r) : - TLiveTraverser(i), reflection(r) { } + TIntermTraverser(), intermediate(i), reflection(r), updateStageMasks(true) { } virtual bool visitBinary(TVisit, TIntermBinary* node); virtual void visitSymbol(TIntermSymbol* base); @@ -92,11 +92,28 @@ public: if (processedDerefs.find(&base) == processedDerefs.end()) { processedDerefs.insert(&base); + int blockIndex = -1; + int offset = -1; + TList derefs; + TString baseName = base.getName(); + + if (base.getType().getBasicType() == EbtBlock) { + offset = 0; + bool anonymous = IsAnonymous(baseName); + const TString& blockName = base.getType().getTypeName(); + + if (!anonymous) + baseName = blockName; + else + baseName = ""; + + blockIndex = addBlockName(blockName, base.getType(), intermediate.getBlockSize(base.getType())); + } + // Use a degenerate (empty) set of dereferences to immediately put as at the end of // the dereference change expected by blowUpActiveAggregate. - TList derefs; - blowUpActiveAggregate(base.getType(), base.getName(), derefs, derefs.end(), -1, -1, 0, 0, - base.getQualifier().storage, true); + blowUpActiveAggregate(base.getType(), baseName, derefs, derefs.end(), offset, blockIndex, 0, -1, 0, + base.getQualifier().storage, updateStageMasks); } } @@ -155,9 +172,9 @@ public: void getOffsets(const TType& type, TVector& offsets) { const TTypeList& memberList = *type.getStruct(); - int memberSize = 0; int offset = 0; + for (size_t m = 0; m < offsets.size(); ++m) { // if the user supplied an offset, snap to it now if (memberList[m].type->getQualifier().hasOffset()) @@ -233,7 +250,7 @@ public: // A value of 0 for arraySize will mean to use the full array's size. void blowUpActiveAggregate(const TType& baseType, const TString& baseName, const TList& derefs, TList::const_iterator deref, int offset, int blockIndex, int arraySize, - int topLevelArrayStride, TStorageQualifier baseStorage, bool active) + int topLevelArraySize, int topLevelArrayStride, TStorageQualifier baseStorage, bool active) { // when strictArraySuffix is enabled, we closely follow the rules from ARB_program_interface_query. 
// Broadly: @@ -262,14 +279,15 @@ public: // Visit all the indices of this array, and for each one add on the remaining dereferencing for (int i = 0; i < std::max(visitNode->getLeft()->getType().getOuterArraySize(), 1); ++i) { TString newBaseName = name; - if (strictArraySuffix && blockParent) + if (terminalType->getBasicType() == EbtBlock) {} + else if (strictArraySuffix && blockParent) newBaseName.append(TString("[0]")); else if (strictArraySuffix || baseType.getBasicType() != EbtBlock) newBaseName.append(TString("[") + String(i) + "]"); TList::const_iterator nextDeref = deref; ++nextDeref; blowUpActiveAggregate(*terminalType, newBaseName, derefs, nextDeref, offset, blockIndex, arraySize, - topLevelArrayStride, baseStorage, active); + topLevelArraySize, topLevelArrayStride, baseStorage, active); if (offset >= 0) offset += stride; @@ -282,9 +300,10 @@ public: int stride = getArrayStride(baseType, visitNode->getLeft()->getType()); index = visitNode->getRight()->getAsConstantUnion()->getConstArray()[0].getIConst(); - if (strictArraySuffix && blockParent) { + if (terminalType->getBasicType() == EbtBlock) {} + else if (strictArraySuffix && blockParent) name.append(TString("[0]")); - } else if (strictArraySuffix || baseType.getBasicType() != EbtBlock) { + else if (strictArraySuffix || baseType.getBasicType() != EbtBlock) { name.append(TString("[") + String(index) + "]"); if (offset >= 0) @@ -294,7 +313,10 @@ public: if (topLevelArrayStride == 0) topLevelArrayStride = stride; - blockParent = false; + // expand top-level arrays in blocks with [0] suffix + if (topLevelArrayStride != 0 && visitNode->getLeft()->getType().isArray()) { + blockParent = false; + } break; } case EOpIndexDirectStruct: @@ -304,6 +326,12 @@ public: if (name.size() > 0) name.append("."); name.append((*visitNode->getLeft()->getType().getStruct())[index].type->getFieldName()); + + // expand non top-level arrays with [x] suffix + if (visitNode->getLeft()->getType().getBasicType() != EbtBlock && terminalType->isArray()) + { + blockParent = false; + } break; default: break; @@ -323,24 +351,27 @@ public: if (offset >= 0) stride = getArrayStride(baseType, *terminalType); - if (topLevelArrayStride == 0) - topLevelArrayStride = stride; - int arrayIterateSize = std::max(terminalType->getOuterArraySize(), 1); // for top-level arrays in blocks, only expand [0] to avoid explosion of items - if (strictArraySuffix && blockParent) + if ((strictArraySuffix && blockParent) || + ((topLevelArraySize == arrayIterateSize) && (topLevelArrayStride == 0))) { arrayIterateSize = 1; + } + + if (topLevelArrayStride == 0) + topLevelArrayStride = stride; for (int i = 0; i < arrayIterateSize; ++i) { TString newBaseName = name; - newBaseName.append(TString("[") + String(i) + "]"); + if (terminalType->getBasicType() != EbtBlock) + newBaseName.append(TString("[") + String(i) + "]"); TType derefType(*terminalType, 0); if (offset >= 0) offset = baseOffset + stride * i; blowUpActiveAggregate(derefType, newBaseName, derefs, derefs.end(), offset, blockIndex, 0, - topLevelArrayStride, baseStorage, active); + topLevelArraySize, topLevelArrayStride, baseStorage, active); } } else { // Visit all members of this aggregate, and for each one, @@ -369,8 +400,31 @@ public: arrayStride = getArrayStride(baseType, derefType); } - blowUpActiveAggregate(derefType, newBaseName, derefs, derefs.end(), offset, blockIndex, 0, - arrayStride, baseStorage, active); + if (topLevelArraySize == -1 && arrayStride == 0 && blockParent) + topLevelArraySize = 1; + + if (strictArraySuffix && 
blockParent) { + // if this member is an array, store the top-level array stride but start the explosion from + // the inner struct type. + if (derefType.isArray() && derefType.isStruct()) { + newBaseName.append("[0]"); + auto dimSize = derefType.isUnsizedArray() ? 0 : derefType.getArraySizes()->getDimSize(0); + blowUpActiveAggregate(TType(derefType, 0), newBaseName, derefs, derefs.end(), memberOffsets[i], + blockIndex, 0, dimSize, arrayStride, terminalType->getQualifier().storage, false); + } + else if (derefType.isArray()) { + auto dimSize = derefType.isUnsizedArray() ? 0 : derefType.getArraySizes()->getDimSize(0); + blowUpActiveAggregate(derefType, newBaseName, derefs, derefs.end(), memberOffsets[i], blockIndex, + 0, dimSize, 0, terminalType->getQualifier().storage, false); + } + else { + blowUpActiveAggregate(derefType, newBaseName, derefs, derefs.end(), memberOffsets[i], blockIndex, + 0, 1, 0, terminalType->getQualifier().storage, false); + } + } else { + blowUpActiveAggregate(derefType, newBaseName, derefs, derefs.end(), offset, blockIndex, 0, + topLevelArraySize, arrayStride, baseStorage, active); + } } } @@ -406,6 +460,7 @@ public: if ((reflection.options & EShReflectionSeparateBuffers) && terminalType->isAtomic()) reflection.atomicCounterUniformIndices.push_back(uniformIndex); + variables.back().topLevelArraySize = topLevelArraySize; variables.back().topLevelArrayStride = topLevelArrayStride; if ((reflection.options & EShReflectionAllBlockVariables) && active) { @@ -537,65 +592,17 @@ public: if (! anonymous) baseName = blockName; - if (base->getType().isArray()) { - TType derefType(base->getType(), 0); - - assert(! anonymous); - for (int e = 0; e < base->getType().getCumulativeArraySize(); ++e) - blockIndex = addBlockName(blockName + "[" + String(e) + "]", derefType, - intermediate.getBlockSize(base->getType())); - baseName.append(TString("[0]")); - } else - blockIndex = addBlockName(blockName, base->getType(), intermediate.getBlockSize(base->getType())); + blockIndex = addBlockName(blockName, base->getType(), intermediate.getBlockSize(base->getType())); if (reflection.options & EShReflectionAllBlockVariables) { // Use a degenerate (empty) set of dereferences to immediately put as at the end of // the dereference change expected by blowUpActiveAggregate. TList derefs; - // because we don't have any derefs, the first thing blowUpActiveAggregate will do is iterate over each - // member in the struct definition. This will lose any information about whether the parent was a buffer - // block. So if we're using strict array rules which don't expand the first child of a buffer block we - // instead iterate over the children here. - const bool strictArraySuffix = (reflection.options & EShReflectionStrictArraySuffix); - bool blockParent = (base->getType().getBasicType() == EbtBlock && base->getQualifier().storage == EvqBuffer); - - if (strictArraySuffix && blockParent) { - TType structDerefType(base->getType(), 0); - - const TType &structType = base->getType().isArray() ? 
structDerefType : base->getType(); - const TTypeList& typeList = *structType.getStruct(); - - TVector memberOffsets; - - memberOffsets.resize(typeList.size()); - getOffsets(structType, memberOffsets); - - for (int i = 0; i < (int)typeList.size(); ++i) { - TType derefType(structType, i); - TString name = baseName; - if (name.size() > 0) - name.append("."); - name.append(typeList[i].type->getFieldName()); - - // if this member is an array, store the top-level array stride but start the explosion from - // the inner struct type. - if (derefType.isArray() && derefType.isStruct()) { - name.append("[0]"); - blowUpActiveAggregate(TType(derefType, 0), name, derefs, derefs.end(), memberOffsets[i], - blockIndex, 0, getArrayStride(structType, derefType), - base->getQualifier().storage, false); - } else { - blowUpActiveAggregate(derefType, name, derefs, derefs.end(), memberOffsets[i], blockIndex, - 0, 0, base->getQualifier().storage, false); - } - } - } else { - // otherwise - if we're not using strict array suffix rules, or this isn't a block so we are - // expanding root arrays anyway, just start the iteration from the base block type. - blowUpActiveAggregate(base->getType(), baseName, derefs, derefs.end(), 0, blockIndex, 0, 0, + // otherwise - if we're not using strict array suffix rules, or this isn't a block so we are + // expanding root arrays anyway, just start the iteration from the base block type. + blowUpActiveAggregate(base->getType(), baseName, derefs, derefs.end(), 0, blockIndex, 0, -1, 0, base->getQualifier().storage, false); - } } } @@ -626,30 +633,40 @@ public: else baseName = base->getName(); } - blowUpActiveAggregate(base->getType(), baseName, derefs, derefs.begin(), offset, blockIndex, arraySize, 0, + blowUpActiveAggregate(base->getType(), baseName, derefs, derefs.begin(), offset, blockIndex, arraySize, -1, 0, base->getQualifier().storage, true); } int addBlockName(const TString& name, const TType& type, int size) { - TReflection::TMapIndexToReflection& blocks = reflection.GetBlockMapForStorage(type.getQualifier().storage); - - int blockIndex; - TReflection::TNameToIndex::const_iterator it = reflection.nameToIndex.find(name.c_str()); - if (reflection.nameToIndex.find(name.c_str()) == reflection.nameToIndex.end()) { - blockIndex = (int)blocks.size(); - reflection.nameToIndex[name.c_str()] = blockIndex; - blocks.push_back(TObjectReflection(name.c_str(), type, -1, -1, size, -1)); - - blocks.back().numMembers = countAggregateMembers(type); - - EShLanguageMask& stages = blocks.back().stages; - stages = static_cast(stages | 1 << intermediate.getStage()); + int blockIndex = 0; + if (type.isArray()) { + TType derefType(type, 0); + for (int e = 0; e < type.getOuterArraySize(); ++e) { + int memberBlockIndex = addBlockName(name + "[" + String(e) + "]", derefType, size); + if (e == 0) + blockIndex = memberBlockIndex; + } } else { - blockIndex = it->second; + TReflection::TMapIndexToReflection& blocks = reflection.GetBlockMapForStorage(type.getQualifier().storage); - EShLanguageMask& stages = blocks[blockIndex].stages; - stages = static_cast(stages | 1 << intermediate.getStage()); + TReflection::TNameToIndex::const_iterator it = reflection.nameToIndex.find(name.c_str()); + if (reflection.nameToIndex.find(name.c_str()) == reflection.nameToIndex.end()) { + blockIndex = (int)blocks.size(); + reflection.nameToIndex[name.c_str()] = blockIndex; + blocks.push_back(TObjectReflection(name.c_str(), type, -1, -1, size, blockIndex)); + + blocks.back().numMembers = countAggregateMembers(type); + + 
EShLanguageMask& stages = blocks.back().stages; + stages = static_cast(stages | 1 << intermediate.getStage()); + } + else { + blockIndex = it->second; + + EShLanguageMask& stages = blocks[blockIndex].stages; + stages = static_cast(stages | 1 << intermediate.getStage()); + } } return blockIndex; @@ -995,8 +1012,10 @@ public: return type.isArray() ? type.getOuterArraySize() : 1; } + const TIntermediate& intermediate; TReflection& reflection; std::set processedDerefs; + bool updateStageMasks; protected: TReflectionTraverser(TReflectionTraverser&); @@ -1029,7 +1048,21 @@ bool TReflectionTraverser::visitBinary(TVisit /* visit */, TIntermBinary* node) // To reflect non-dereferenced objects. void TReflectionTraverser::visitSymbol(TIntermSymbol* base) { - if (base->getQualifier().storage == EvqUniform) + if (base->getQualifier().storage == EvqUniform) { + if (base->getBasicType() == EbtBlock) { + if (reflection.options & EShReflectionSharedStd140UBO) { + addUniform(*base); + } + } else { + addUniform(*base); + } + } + + // #TODO add std140/layout active rules for ssbo, same with ubo. + // Storage buffer blocks will be collected and expanding in this part. + if((reflection.options & EShReflectionSharedStd140SSBO) && + (base->getQualifier().storage == EvqBuffer && base->getBasicType() == EbtBlock && + (base->getQualifier().layoutPacking == ElpStd140 || base->getQualifier().layoutPacking == ElpShared))) addUniform(*base); if ((intermediate.getStage() == reflection.firstStage && base->getQualifier().isPipeInput()) || @@ -1135,15 +1168,47 @@ bool TReflection::addStage(EShLanguage stage, const TIntermediate& intermediate) TReflectionTraverser it(intermediate, *this); - // put the entry point on the list of functions to process - it.pushFunction(intermediate.getEntryPointMangledName().c_str()); - - // process all the functions - while (! it.functions.empty()) { - TIntermNode* function = it.functions.back(); - it.functions.pop_back(); - function->traverse(&it); + for (auto& sequnence : intermediate.getTreeRoot()->getAsAggregate()->getSequence()) { + if (sequnence->getAsAggregate() != nullptr) { + if (sequnence->getAsAggregate()->getOp() == glslang::EOpLinkerObjects) { + it.updateStageMasks = false; + TIntermAggregate* linkerObjects = sequnence->getAsAggregate(); + for (auto& sequnence : linkerObjects->getSequence()) { + auto pNode = sequnence->getAsSymbolNode(); + if (pNode != nullptr) { + if ((pNode->getQualifier().storage == EvqUniform && + (options & EShReflectionSharedStd140UBO)) || + (pNode->getQualifier().storage == EvqBuffer && + (options & EShReflectionSharedStd140SSBO))) { + // collect std140 and shared uniform block form AST + if ((pNode->getBasicType() == EbtBlock) && + ((pNode->getQualifier().layoutPacking == ElpStd140) || + (pNode->getQualifier().layoutPacking == ElpShared))) { + pNode->traverse(&it); + } + } + else if ((options & EShReflectionAllIOVariables) && + (pNode->getQualifier().isPipeInput() || pNode->getQualifier().isPipeOutput())) + { + pNode->traverse(&it); + } + } + } + } else { + // This traverser will travers all function in AST. + // If we want reflect uncalled function, we need set linke message EShMsgKeepUncalled. + // When EShMsgKeepUncalled been set to true, all function will be keep in AST, even it is a uncalled function. + // This will keep some uniform variables in reflection, if those uniform variables is used in these uncalled function. + // + // If we just want reflect only live node, we can use a default link message or set EShMsgKeepUncalled false. 
+ // When linke message not been set EShMsgKeepUncalled, linker won't keep uncalled function in AST. + // So, travers all function node can equivalent to travers live function. + it.updateStageMasks = true; + sequnence->getAsAggregate()->traverse(&it); + } + } } + it.updateStageMasks = true; buildCounterIndices(intermediate); buildUniformStageMask(intermediate); @@ -1188,7 +1253,7 @@ void TReflection::dump() for (int dim=0; dim<3; ++dim) if (getLocalSize(dim) > 1) - printf("Local size %s: %d\n", axis[dim], getLocalSize(dim)); + printf("Local size %s: %u\n", axis[dim], getLocalSize(dim)); printf("\n"); } @@ -1201,4 +1266,4 @@ void TReflection::dump() } // end namespace glslang -#endif // GLSLANG_WEB +#endif // !GLSLANG_WEB && !GLSLANG_ANGLE diff --git a/thirdparty/glslang/glslang/MachineIndependent/reflection.h b/thirdparty/glslang/glslang/MachineIndependent/reflection.h index efdc8934fbd..5af4467c1fb 100644 --- a/thirdparty/glslang/glslang/MachineIndependent/reflection.h +++ b/thirdparty/glslang/glslang/MachineIndependent/reflection.h @@ -33,7 +33,7 @@ // POSSIBILITY OF SUCH DAMAGE. // -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) #ifndef _REFLECTION_INCLUDED #define _REFLECTION_INCLUDED @@ -220,4 +220,4 @@ protected: #endif // _REFLECTION_INCLUDED -#endif // GLSLANG_WEB \ No newline at end of file +#endif // !GLSLANG_WEB && !GLSLANG_ANGLE diff --git a/thirdparty/glslang/glslang/OSDependent/Web/glslang.js.cpp b/thirdparty/glslang/glslang/OSDependent/Web/glslang.js.cpp index 6cb93fe27e6..f2306a60922 100644 --- a/thirdparty/glslang/glslang/OSDependent/Web/glslang.js.cpp +++ b/thirdparty/glslang/glslang/OSDependent/Web/glslang.js.cpp @@ -141,6 +141,7 @@ const TBuiltInResource DefaultTBuiltInResource = { /* .maxTaskWorkGroupSizeY_NV = */ 1, /* .maxTaskWorkGroupSizeZ_NV = */ 1, /* .maxMeshViewCountNV = */ 4, + /* .maxDualSourceDrawBuffersEXT = */ 1, /* .limits = */ { /* .nonInductiveForLoops = */ 1, @@ -176,7 +177,12 @@ extern "C" { * If null, the compilation failed. 
*/ EMSCRIPTEN_KEEPALIVE -void* convert_glsl_to_spirv(const char* glsl, int stage_int, bool gen_debug, uint32_t** spirv, size_t* spirv_len) +void* convert_glsl_to_spirv(const char* glsl, + int stage_int, + bool gen_debug, + glslang::EShTargetLanguageVersion spirv_version, + uint32_t** spirv, + size_t* spirv_len) { if (glsl == nullptr) { fprintf(stderr, "Input pointer null\n"); @@ -194,6 +200,18 @@ void* convert_glsl_to_spirv(const char* glsl, int stage_int, bool gen_debug, uin return nullptr; } EShLanguage stage = static_cast(stage_int); + switch (spirv_version) { + case glslang::EShTargetSpv_1_0: + case glslang::EShTargetSpv_1_1: + case glslang::EShTargetSpv_1_2: + case glslang::EShTargetSpv_1_3: + case glslang::EShTargetSpv_1_4: + case glslang::EShTargetSpv_1_5: + break; + default: + fprintf(stderr, "Invalid SPIR-V version number\n"); + return nullptr; + } if (!initialized) { glslang::InitializeProcess(); @@ -203,8 +221,8 @@ void* convert_glsl_to_spirv(const char* glsl, int stage_int, bool gen_debug, uin glslang::TShader shader(stage); shader.setStrings(&glsl, 1); shader.setEnvInput(glslang::EShSourceGlsl, stage, glslang::EShClientVulkan, 100); - shader.setEnvClient(glslang::EShClientVulkan, glslang::EShTargetVulkan_1_1); - shader.setEnvTarget(glslang::EShTargetSpv, glslang::EShTargetSpv_1_3); + shader.setEnvClient(glslang::EShClientVulkan, glslang::EShTargetVulkan_1_0); + shader.setEnvTarget(glslang::EShTargetSpv, spirv_version); if (!shader.parse(&DefaultTBuiltInResource, 100, true, EShMsgDefault)) { fprintf(stderr, "Parse failed\n"); fprintf(stderr, "%s\n", shader.getInfoLog()); @@ -260,7 +278,7 @@ void main() { })"; uint32_t* output; size_t output_len; - void* id = convert_glsl_to_spirv(input, 4, false, &output, &output_len); + void* id = convert_glsl_to_spirv(input, 4, false, glslang::EShTargetSpv_1_0, &output, &output_len); assert(output != nullptr); assert(output_len != 0); destroy_output_buffer(id); diff --git a/thirdparty/glslang/glslang/OSDependent/Web/glslang.pre.js b/thirdparty/glslang/glslang/OSDependent/Web/glslang.pre.js index 7d3fd0234c3..46a569506d7 100644 --- a/thirdparty/glslang/glslang/OSDependent/Web/glslang.pre.js +++ b/thirdparty/glslang/glslang/OSDependent/Web/glslang.pre.js @@ -1,23 +1,34 @@ -Module['compileGLSLZeroCopy'] = function(glsl, shader_stage, gen_debug) { +Module['compileGLSLZeroCopy'] = function(glsl, shader_stage, gen_debug, spirv_version) { gen_debug = !!gen_debug; - var shader_stage_int; - if (shader_stage === 'vertex') { - shader_stage_int = 0; - } else if (shader_stage === 'fragment') { - shader_stage_int = 4; - } else if (shader_stage === 'compute') { - shader_stage_int = 5; - } else { - throw new Error("shader_stage must be 'vertex', 'fragment', or 'compute'"); + var shader_stage_int; // EShLanguage + switch (shader_stage) { + case 'vertex': shader_stage_int = 0; break; + case 'fragment': shader_stage_int = 4; break; + case 'compute': shader_stage_int = 5; break; + default: + throw new Error("shader_stage must be 'vertex', 'fragment', or 'compute'."); + } + + spirv_version = spirv_version || '1.0'; + var spirv_version_int; // EShTargetLanguageVersion + switch (spirv_version) { + case '1.0': spirv_version_int = (1 << 16) | (0 << 8); break; + case '1.1': spirv_version_int = (1 << 16) | (1 << 8); break; + case '1.2': spirv_version_int = (1 << 16) | (2 << 8); break; + case '1.3': spirv_version_int = (1 << 16) | (3 << 8); break; + case '1.4': spirv_version_int = (1 << 16) | (4 << 8); break; + case '1.5': spirv_version_int = (1 << 16) | (5 << 8); 
break; + default: + throw new Error("spirv_version must be '1.0' ~ '1.5'."); } var p_output = Module['_malloc'](4); var p_output_len = Module['_malloc'](4); var id = ccall('convert_glsl_to_spirv', 'number', - ['string', 'number', 'boolean', 'number', 'number'], - [glsl, shader_stage_int, gen_debug, p_output, p_output_len]); + ['string', 'number', 'boolean', 'number', 'number', 'number'], + [glsl, shader_stage_int, gen_debug, spirv_version_int, p_output, p_output_len]); var output = getValue(p_output, 'i32'); var output_len = getValue(p_output_len, 'i32'); Module['_free'](p_output); @@ -37,8 +48,8 @@ Module['compileGLSLZeroCopy'] = function(glsl, shader_stage, gen_debug) { return ret; }; -Module['compileGLSL'] = function(glsl, shader_stage, gen_debug) { - var compiled = Module['compileGLSLZeroCopy'](glsl, shader_stage, gen_debug); +Module['compileGLSL'] = function(glsl, shader_stage, gen_debug, spirv_version) { + var compiled = Module['compileGLSLZeroCopy'](glsl, shader_stage, gen_debug, spirv_version); var ret = compiled['data'].slice() compiled['free'](); return ret; diff --git a/thirdparty/glslang/glslang/Public/ShaderLang.h b/thirdparty/glslang/glslang/Public/ShaderLang.h old mode 100755 new mode 100644 index 4fe5c7df192..273f1569a00 --- a/thirdparty/glslang/glslang/Public/ShaderLang.h +++ b/thirdparty/glslang/glslang/Public/ShaderLang.h @@ -44,16 +44,25 @@ #include #ifdef _WIN32 -#define C_DECL __cdecl -//#ifdef SH_EXPORTING -// #define SH_IMPORT_EXPORT __declspec(dllexport) -//#else -// #define SH_IMPORT_EXPORT __declspec(dllimport) -//#endif -#define SH_IMPORT_EXPORT + #define C_DECL __cdecl #else -#define SH_IMPORT_EXPORT -#define C_DECL + #define C_DECL +#endif + +#ifdef GLSLANG_IS_SHARED_LIBRARY + #ifdef _WIN32 + #ifdef GLSLANG_EXPORTING + #define GLSLANG_EXPORT __declspec(dllexport) + #else + #define GLSLANG_EXPORT __declspec(dllimport) + #endif + #elif __GNUC__ >= 4 + #define GLSLANG_EXPORT __attribute__((visibility("default"))) + #endif +#endif // GLSLANG_IS_SHARED_LIBRARY + +#ifndef GLSLANG_EXPORT +#define GLSLANG_EXPORT #endif // @@ -65,22 +74,17 @@ extern "C" { #endif -// This should always increase, as some paths to do not consume -// a more major number. -// It should increment by one when new functionality is added. -#define GLSLANG_MINOR_VERSION 13 - // // Call before doing any other compiler/linker operations. // // (Call once per process, not once per thread.) // -SH_IMPORT_EXPORT int ShInitialize(); +GLSLANG_EXPORT int ShInitialize(); // // Call this at process shutdown to clean up memory. // -SH_IMPORT_EXPORT int ShFinalize(); +GLSLANG_EXPORT int ShFinalize(); // // Types of languages the compiler can consume. 
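The spirv_version_int values built in glslang.pre.js above, and the EShTargetLanguageVersion / EShTargetClientVersion enums in the ShaderLang.h hunks that follow, use the same packing: (major << 16) | (minor << 8) for SPIR-V and (1 << 22) | (minor << 12) for Vulkan 1.x. A couple of static_asserts spelling the packing out; the enum values are the ones defined in this header, and only the include path may need adjusting for a given checkout.

#include <glslang/Public/ShaderLang.h>   // adjust the path to your checkout

static_assert(glslang::EShTargetSpv_1_0    == ((1 << 16) | (0 << 8)),  "SPIR-V 1.0 word");
static_assert(glslang::EShTargetSpv_1_5    == ((1 << 16) | (5 << 8)),  "SPIR-V 1.5 word");
static_assert(glslang::EShTargetVulkan_1_0 ==  (1 << 22),              "Vulkan 1.0 word");
static_assert(glslang::EShTargetVulkan_1_2 == ((1 << 22) | (2 << 12)), "Vulkan 1.2 word");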
@@ -92,32 +96,45 @@ typedef enum { EShLangGeometry, EShLangFragment, EShLangCompute, - EShLangRayGenNV, - EShLangIntersectNV, - EShLangAnyHitNV, - EShLangClosestHitNV, - EShLangMissNV, - EShLangCallableNV, + EShLangRayGen, + EShLangRayGenNV = EShLangRayGen, + EShLangIntersect, + EShLangIntersectNV = EShLangIntersect, + EShLangAnyHit, + EShLangAnyHitNV = EShLangAnyHit, + EShLangClosestHit, + EShLangClosestHitNV = EShLangClosestHit, + EShLangMiss, + EShLangMissNV = EShLangMiss, + EShLangCallable, + EShLangCallableNV = EShLangCallable, EShLangTaskNV, EShLangMeshNV, - EShLangCount, + LAST_ELEMENT_MARKER(EShLangCount), } EShLanguage; // would be better as stage, but this is ancient now -typedef enum { +typedef enum : unsigned { EShLangVertexMask = (1 << EShLangVertex), EShLangTessControlMask = (1 << EShLangTessControl), EShLangTessEvaluationMask = (1 << EShLangTessEvaluation), EShLangGeometryMask = (1 << EShLangGeometry), EShLangFragmentMask = (1 << EShLangFragment), EShLangComputeMask = (1 << EShLangCompute), - EShLangRayGenNVMask = (1 << EShLangRayGenNV), - EShLangIntersectNVMask = (1 << EShLangIntersectNV), - EShLangAnyHitNVMask = (1 << EShLangAnyHitNV), - EShLangClosestHitNVMask = (1 << EShLangClosestHitNV), - EShLangMissNVMask = (1 << EShLangMissNV), - EShLangCallableNVMask = (1 << EShLangCallableNV), + EShLangRayGenMask = (1 << EShLangRayGen), + EShLangRayGenNVMask = EShLangRayGenMask, + EShLangIntersectMask = (1 << EShLangIntersect), + EShLangIntersectNVMask = EShLangIntersectMask, + EShLangAnyHitMask = (1 << EShLangAnyHit), + EShLangAnyHitNVMask = EShLangAnyHitMask, + EShLangClosestHitMask = (1 << EShLangClosestHit), + EShLangClosestHitNVMask = EShLangClosestHitMask, + EShLangMissMask = (1 << EShLangMiss), + EShLangMissNVMask = EShLangMissMask, + EShLangCallableMask = (1 << EShLangCallable), + EShLangCallableNVMask = EShLangCallableMask, EShLangTaskNVMask = (1 << EShLangTaskNV), EShLangMeshNVMask = (1 << EShLangMeshNV), + LAST_ELEMENT_MARKER(EShLanguageMaskCount), } EShLanguageMask; namespace glslang { @@ -128,18 +145,21 @@ typedef enum { EShSourceNone, EShSourceGlsl, // GLSL, includes ESSL (OpenGL ES GLSL) EShSourceHlsl, // HLSL + LAST_ELEMENT_MARKER(EShSourceCount), } EShSource; // if EShLanguage were EShStage, this could be EShLanguage instead typedef enum { EShClientNone, // use when there is no client, e.g. 
for validation EShClientVulkan, EShClientOpenGL, + LAST_ELEMENT_MARKER(EShClientCount), } EShClient; typedef enum { EShTargetNone, EShTargetSpv, // SPIR-V (preferred spelling) EshTargetSpv = EShTargetSpv, // legacy spelling + LAST_ELEMENT_MARKER(EShTargetCount), } EShTargetLanguage; typedef enum { @@ -147,6 +167,7 @@ typedef enum { EShTargetVulkan_1_1 = (1 << 22) | (1 << 12), // Vulkan 1.1 EShTargetVulkan_1_2 = (1 << 22) | (2 << 12), // Vulkan 1.2 EShTargetOpenGL_450 = 450, // OpenGL + LAST_ELEMENT_MARKER(EShTargetClientVersionCount), } EShTargetClientVersion; typedef EShTargetClientVersion EshTargetClientVersion; @@ -158,6 +179,7 @@ typedef enum { EShTargetSpv_1_3 = (1 << 16) | (3 << 8), // SPIR-V 1.3 EShTargetSpv_1_4 = (1 << 16) | (4 << 8), // SPIR-V 1.4 EShTargetSpv_1_5 = (1 << 16) | (5 << 8), // SPIR-V 1.5 + LAST_ELEMENT_MARKER(EShTargetLanguageVersionCount), } EShTargetLanguageVersion; struct TInputLanguage { @@ -187,7 +209,7 @@ struct TEnvironment { TTarget target; // what to generate }; -const char* StageName(EShLanguage); +GLSLANG_EXPORT const char* StageName(EShLanguage); } // end namespace glslang @@ -207,6 +229,7 @@ typedef enum { EShOptNone, EShOptSimple, // Optimizations that can be done quickly EShOptFull, // Optimizations that will take more time + LAST_ELEMENT_MARKER(EshOptLevelCount), } EShOptimizationLevel; // @@ -215,12 +238,13 @@ typedef enum { typedef enum { EShTexSampTransKeep, // keep textures and samplers as is (default) EShTexSampTransUpgradeTextureRemoveSampler, // change texture w/o embeded sampler into sampled texture and throw away all samplers + LAST_ELEMENT_MARKER(EShTexSampTransCount), } EShTextureSamplerTransformMode; // // Message choices for what errors and warnings are given. // -enum EShMessages { +enum EShMessages : unsigned { EShMsgDefault = 0, // default is to give all required errors and extra warnings EShMsgRelaxedErrors = (1 << 0), // be liberal in accepting input EShMsgSuppressWarnings = (1 << 1), // suppress all warnings, except those required by the specification @@ -235,21 +259,26 @@ enum EShMessages { EShMsgDebugInfo = (1 << 10), // save debug information EShMsgHlslEnable16BitTypes = (1 << 11), // enable use of 16-bit types in SPIR-V for HLSL EShMsgHlslLegalization = (1 << 12), // enable HLSL Legalization messages - EShMsgHlslDX9Compatible = (1 << 13), // enable HLSL DX9 compatible mode (right now only for samplers) + EShMsgHlslDX9Compatible = (1 << 13), // enable HLSL DX9 compatible mode (for samplers and semantics) EShMsgBuiltinSymbolTable = (1 << 14), // print the builtin symbol table + LAST_ELEMENT_MARKER(EShMsgCount), }; // // Options for building reflection // typedef enum { - EShReflectionDefault = 0, // default is original behaviour before options were added - EShReflectionStrictArraySuffix = (1 << 0), // reflection will follow stricter rules for array-of-structs suffixes - EShReflectionBasicArraySuffix = (1 << 1), // arrays of basic types will be appended with [0] as in GL reflection - EShReflectionIntermediateIO = (1 << 2), // reflect inputs and outputs to program, even with no vertex shader - EShReflectionSeparateBuffers = (1 << 3), // buffer variables and buffer blocks are reflected separately - EShReflectionAllBlockVariables = (1 << 4), // reflect all variables in blocks, even if they are inactive - EShReflectionUnwrapIOBlocks = (1 << 5), // unwrap input/output blocks the same as with uniform blocks + EShReflectionDefault = 0, // default is original behaviour before options were added + EShReflectionStrictArraySuffix = (1 << 0), 
// reflection will follow stricter rules for array-of-structs suffixes + EShReflectionBasicArraySuffix = (1 << 1), // arrays of basic types will be appended with [0] as in GL reflection + EShReflectionIntermediateIO = (1 << 2), // reflect inputs and outputs to program, even with no vertex shader + EShReflectionSeparateBuffers = (1 << 3), // buffer variables and buffer blocks are reflected separately + EShReflectionAllBlockVariables = (1 << 4), // reflect all variables in blocks, even if they are inactive + EShReflectionUnwrapIOBlocks = (1 << 5), // unwrap input/output blocks the same as with uniform blocks + EShReflectionAllIOVariables = (1 << 6), // reflect all input/output variables, even if they are inactive + EShReflectionSharedStd140SSBO = (1 << 7), // Apply std140/shared rules for ubo to ssbo + EShReflectionSharedStd140UBO = (1 << 8), // Apply std140/shared rules for ubo to ssbo + LAST_ELEMENT_MARKER(EShReflectionCount), } EShReflectionOptions; // @@ -281,10 +310,10 @@ typedef void* ShHandle; // Driver calls these to create and destroy compiler/linker // objects. // -SH_IMPORT_EXPORT ShHandle ShConstructCompiler(const EShLanguage, int debugOptions); // one per shader -SH_IMPORT_EXPORT ShHandle ShConstructLinker(const EShExecutable, int debugOptions); // one per shader pair -SH_IMPORT_EXPORT ShHandle ShConstructUniformMap(); // one per uniform namespace (currently entire program object) -SH_IMPORT_EXPORT void ShDestruct(ShHandle); +GLSLANG_EXPORT ShHandle ShConstructCompiler(const EShLanguage, int debugOptions); // one per shader +GLSLANG_EXPORT ShHandle ShConstructLinker(const EShExecutable, int debugOptions); // one per shader pair +GLSLANG_EXPORT ShHandle ShConstructUniformMap(); // one per uniform namespace (currently entire program object) +GLSLANG_EXPORT void ShDestruct(ShHandle); // // The return value of ShCompile is boolean, non-zero indicating @@ -293,7 +322,7 @@ SH_IMPORT_EXPORT void ShDestruct(ShHandle); // The info-log should be written by ShCompile into // ShHandle, so it can answer future queries. // -SH_IMPORT_EXPORT int ShCompile( +GLSLANG_EXPORT int ShCompile( const ShHandle, const char* const shaderStrings[], const int numStrings, @@ -306,7 +335,7 @@ SH_IMPORT_EXPORT int ShCompile( EShMessages messages = EShMsgDefault // warnings and errors ); -SH_IMPORT_EXPORT int ShLinkExt( +GLSLANG_EXPORT int ShLinkExt( const ShHandle, // linker object const ShHandle h[], // compiler objects to link together const int numHandles); @@ -315,26 +344,26 @@ SH_IMPORT_EXPORT int ShLinkExt( // ShSetEncrpytionMethod is a place-holder for specifying // how source code is encrypted. // -SH_IMPORT_EXPORT void ShSetEncryptionMethod(ShHandle); +GLSLANG_EXPORT void ShSetEncryptionMethod(ShHandle); // // All the following return 0 if the information is not // available in the object passed down, or the object is bad. 
// -SH_IMPORT_EXPORT const char* ShGetInfoLog(const ShHandle); -SH_IMPORT_EXPORT const void* ShGetExecutable(const ShHandle); -SH_IMPORT_EXPORT int ShSetVirtualAttributeBindings(const ShHandle, const ShBindingTable*); // to detect user aliasing -SH_IMPORT_EXPORT int ShSetFixedAttributeBindings(const ShHandle, const ShBindingTable*); // to force any physical mappings +GLSLANG_EXPORT const char* ShGetInfoLog(const ShHandle); +GLSLANG_EXPORT const void* ShGetExecutable(const ShHandle); +GLSLANG_EXPORT int ShSetVirtualAttributeBindings(const ShHandle, const ShBindingTable*); // to detect user aliasing +GLSLANG_EXPORT int ShSetFixedAttributeBindings(const ShHandle, const ShBindingTable*); // to force any physical mappings // // Tell the linker to never assign a vertex attribute to this list of physical attributes // -SH_IMPORT_EXPORT int ShExcludeAttributes(const ShHandle, int *attributes, int count); +GLSLANG_EXPORT int ShExcludeAttributes(const ShHandle, int *attributes, int count); // // Returns the location ID of the named uniform. // Returns -1 if error. // -SH_IMPORT_EXPORT int ShGetUniformLocation(const ShHandle uniformMap, const char* name); +GLSLANG_EXPORT int ShGetUniformLocation(const ShHandle uniformMap, const char* name); #ifdef __cplusplus } // end extern "C" @@ -365,19 +394,27 @@ class TInfoSink; namespace glslang { -const char* GetEsslVersionString(); -const char* GetGlslVersionString(); -int GetKhronosToolId(); +struct Version { + int major; + int minor; + int patch; + const char* flavor; +}; + +GLSLANG_EXPORT Version GetVersion(); +GLSLANG_EXPORT const char* GetEsslVersionString(); +GLSLANG_EXPORT const char* GetGlslVersionString(); +GLSLANG_EXPORT int GetKhronosToolId(); class TIntermediate; class TProgram; class TPoolAllocator; // Call this exactly once per process before using anything else -bool InitializeProcess(); +GLSLANG_EXPORT bool InitializeProcess(); // Call once per process to tear down everything -void FinalizeProcess(); +GLSLANG_EXPORT void FinalizeProcess(); // Resource type for IO resolver enum TResourceType { @@ -390,11 +427,14 @@ enum TResourceType { EResCount }; + // Make one TShader per shader that you will link into a program. 
Then // - provide the shader through setStrings() or setStringsWithLengths() // - optionally call setEnv*(), see below for more detail // - optionally use setPreamble() to set a special shader string that will be // processed before all others but won't affect the validity of #version +// - optionally call addProcesses() for each setting/transform, +// see comment for class TProcesses // - call parse(): source language and target environment must be selected // either by correct setting of EShMessages sent to parse(), or by // explicitly calling setEnv*() @@ -407,40 +447,41 @@ enum TResourceType { // class TShader { public: - explicit TShader(EShLanguage); - virtual ~TShader(); - void setStrings(const char* const* s, int n); - void setStringsWithLengths(const char* const* s, const int* l, int n); - void setStringsWithLengthsAndNames( + GLSLANG_EXPORT explicit TShader(EShLanguage); + GLSLANG_EXPORT virtual ~TShader(); + GLSLANG_EXPORT void setStrings(const char* const* s, int n); + GLSLANG_EXPORT void setStringsWithLengths( + const char* const* s, const int* l, int n); + GLSLANG_EXPORT void setStringsWithLengthsAndNames( const char* const* s, const int* l, const char* const* names, int n); void setPreamble(const char* s) { preamble = s; } - void setEntryPoint(const char* entryPoint); - void setSourceEntryPoint(const char* sourceEntryPointName); - void addProcesses(const std::vector&); + GLSLANG_EXPORT void setEntryPoint(const char* entryPoint); + GLSLANG_EXPORT void setSourceEntryPoint(const char* sourceEntryPointName); + GLSLANG_EXPORT void addProcesses(const std::vector&); // IO resolver binding data: see comments in ShaderLang.cpp - void setShiftBinding(TResourceType res, unsigned int base); - void setShiftSamplerBinding(unsigned int base); // DEPRECATED: use setShiftBinding - void setShiftTextureBinding(unsigned int base); // DEPRECATED: use setShiftBinding - void setShiftImageBinding(unsigned int base); // DEPRECATED: use setShiftBinding - void setShiftUboBinding(unsigned int base); // DEPRECATED: use setShiftBinding - void setShiftUavBinding(unsigned int base); // DEPRECATED: use setShiftBinding - void setShiftCbufferBinding(unsigned int base); // synonym for setShiftUboBinding - void setShiftSsboBinding(unsigned int base); // DEPRECATED: use setShiftBinding - void setShiftBindingForSet(TResourceType res, unsigned int base, unsigned int set); - void setResourceSetBinding(const std::vector& base); - void setAutoMapBindings(bool map); - void setAutoMapLocations(bool map); - void addUniformLocationOverride(const char* name, int loc); - void setUniformLocationBase(int base); - void setInvertY(bool invert); + GLSLANG_EXPORT void setShiftBinding(TResourceType res, unsigned int base); + GLSLANG_EXPORT void setShiftSamplerBinding(unsigned int base); // DEPRECATED: use setShiftBinding + GLSLANG_EXPORT void setShiftTextureBinding(unsigned int base); // DEPRECATED: use setShiftBinding + GLSLANG_EXPORT void setShiftImageBinding(unsigned int base); // DEPRECATED: use setShiftBinding + GLSLANG_EXPORT void setShiftUboBinding(unsigned int base); // DEPRECATED: use setShiftBinding + GLSLANG_EXPORT void setShiftUavBinding(unsigned int base); // DEPRECATED: use setShiftBinding + GLSLANG_EXPORT void setShiftCbufferBinding(unsigned int base); // synonym for setShiftUboBinding + GLSLANG_EXPORT void setShiftSsboBinding(unsigned int base); // DEPRECATED: use setShiftBinding + GLSLANG_EXPORT void setShiftBindingForSet(TResourceType res, unsigned int base, unsigned int set); + GLSLANG_EXPORT void 
setResourceSetBinding(const std::vector& base); + GLSLANG_EXPORT void setAutoMapBindings(bool map); + GLSLANG_EXPORT void setAutoMapLocations(bool map); + GLSLANG_EXPORT void addUniformLocationOverride(const char* name, int loc); + GLSLANG_EXPORT void setUniformLocationBase(int base); + GLSLANG_EXPORT void setInvertY(bool invert); #ifdef ENABLE_HLSL - void setHlslIoMapping(bool hlslIoMap); - void setFlattenUniformArrays(bool flatten); + GLSLANG_EXPORT void setHlslIoMapping(bool hlslIoMap); + GLSLANG_EXPORT void setFlattenUniformArrays(bool flatten); #endif - void setNoStorageFormat(bool useUnknownFormat); - void setNanMinMaxClamp(bool nanMinMaxClamp); - void setTextureSamplerTransformMode(EShTextureSamplerTransformMode mode); + GLSLANG_EXPORT void setNoStorageFormat(bool useUnknownFormat); + GLSLANG_EXPORT void setNanMinMaxClamp(bool nanMinMaxClamp); + GLSLANG_EXPORT void setTextureSamplerTransformMode(EShTextureSamplerTransformMode mode); // For setting up the environment (cleared to nothingness in the constructor). // These must be called so that parsing is done for the right source language and @@ -580,8 +621,10 @@ public: virtual void releaseInclude(IncludeResult*) override { } }; - bool parse(const TBuiltInResource*, int defaultVersion, EProfile defaultProfile, bool forceDefaultVersionAndProfile, - bool forwardCompatible, EShMessages, Includer&); + GLSLANG_EXPORT bool parse( + const TBuiltInResource*, int defaultVersion, EProfile defaultProfile, + bool forceDefaultVersionAndProfile, bool forwardCompatible, + EShMessages, Includer&); bool parse(const TBuiltInResource* res, int defaultVersion, EProfile defaultProfile, bool forceDefaultVersionAndProfile, bool forwardCompatible, EShMessages messages) @@ -604,13 +647,14 @@ public: // NOTE: Doing just preprocessing to obtain a correct preprocessed shader string // is not an officially supported or fully working path. - bool preprocess(const TBuiltInResource* builtInResources, - int defaultVersion, EProfile defaultProfile, bool forceDefaultVersionAndProfile, - bool forwardCompatible, EShMessages message, std::string* outputString, - Includer& includer); + GLSLANG_EXPORT bool preprocess( + const TBuiltInResource* builtInResources, int defaultVersion, + EProfile defaultProfile, bool forceDefaultVersionAndProfile, + bool forwardCompatible, EShMessages message, std::string* outputString, + Includer& includer); - const char* getInfoLog(); - const char* getInfoDebugLog(); + GLSLANG_EXPORT const char* getInfoLog(); + GLSLANG_EXPORT const char* getInfoDebugLog(); EShLanguage getStage() const { return stage; } TIntermediate* getIntermediate() const { return intermediate; } @@ -629,11 +673,11 @@ protected: // stringNames is the optional names for all the strings. If stringNames // is null, then none of the strings has name. If a certain element in // stringNames is null, then the corresponding string does not have name. - const char* const* strings; + const char* const* strings; // explicit code to compile, see previous comment const int* lengths; const char* const* stringNames; - const char* preamble; - int numStrings; + int numStrings; // size of the above arrays + const char* preamble; // string of implicit code to compile before the explicitly provided code // a function in the source string can be renamed FROM this TO the name given in setEntryPoint. 
std::string sourceEntryPointName; @@ -646,7 +690,7 @@ private: TShader& operator=(TShader&); }; -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) // // A reflection database and its interface, consistent with the OpenGL API reflection queries. @@ -655,11 +699,11 @@ private: // Data needed for just a single object at the granularity exchanged by the reflection API class TObjectReflection { public: - TObjectReflection(const std::string& pName, const TType& pType, int pOffset, int pGLDefineType, int pSize, int pIndex); + GLSLANG_EXPORT TObjectReflection(const std::string& pName, const TType& pType, int pOffset, int pGLDefineType, int pSize, int pIndex); - const TType* getType() const { return type; } - int getBinding() const; - void dump() const; + GLSLANG_EXPORT const TType* getType() const { return type; } + GLSLANG_EXPORT int getBinding() const; + GLSLANG_EXPORT void dump() const; static TObjectReflection badReflection() { return TObjectReflection(); } std::string name; @@ -670,6 +714,7 @@ public: int counterIndex; int numMembers; int arrayStride; // stride of an array variable + int topLevelArraySize; // size of the top-level variable in a storage buffer member int topLevelArrayStride; // stride of the top-level variable in a storage buffer member EShLanguageMask stages; @@ -763,7 +808,7 @@ public: virtual void addStage(EShLanguage stage) = 0; }; -#endif // GLSLANG_WEB +#endif // !GLSLANG_WEB && !GLSLANG_ANGLE // Make one TProgram per set of shaders that will get linked together. Add all // the shaders that are to be linked together. After calling shader.parse() @@ -773,40 +818,40 @@ public: // class TProgram { public: - TProgram(); - virtual ~TProgram(); + GLSLANG_EXPORT TProgram(); + GLSLANG_EXPORT virtual ~TProgram(); void addShader(TShader* shader) { stages[shader->stage].push_back(shader); } std::list& getShaders(EShLanguage stage) { return stages[stage]; } // Link Validation interface - bool link(EShMessages); - const char* getInfoLog(); - const char* getInfoDebugLog(); + GLSLANG_EXPORT bool link(EShMessages); + GLSLANG_EXPORT const char* getInfoLog(); + GLSLANG_EXPORT const char* getInfoDebugLog(); TIntermediate* getIntermediate(EShLanguage stage) const { return intermediate[stage]; } -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) // Reflection Interface // call first, to do liveness analysis, index mapping, etc.; returns false on failure - bool buildReflection(int opts = EShReflectionDefault); - unsigned getLocalSize(int dim) const; // return dim'th local size - int getReflectionIndex(const char *name) const; - int getReflectionPipeIOIndex(const char* name, const bool inOrOut) const; - int getNumUniformVariables() const; - const TObjectReflection& getUniform(int index) const; - int getNumUniformBlocks() const; - const TObjectReflection& getUniformBlock(int index) const; - int getNumPipeInputs() const; - const TObjectReflection& getPipeInput(int index) const; - int getNumPipeOutputs() const; - const TObjectReflection& getPipeOutput(int index) const; - int getNumBufferVariables() const; - const TObjectReflection& getBufferVariable(int index) const; - int getNumBufferBlocks() const; - const TObjectReflection& getBufferBlock(int index) const; - int getNumAtomicCounters() const; - const TObjectReflection& getAtomicCounter(int index) const; + GLSLANG_EXPORT bool buildReflection(int opts = EShReflectionDefault); + GLSLANG_EXPORT unsigned getLocalSize(int dim) const; // return dim'th local size + GLSLANG_EXPORT int 
getReflectionIndex(const char *name) const; + GLSLANG_EXPORT int getReflectionPipeIOIndex(const char* name, const bool inOrOut) const; + GLSLANG_EXPORT int getNumUniformVariables() const; + GLSLANG_EXPORT const TObjectReflection& getUniform(int index) const; + GLSLANG_EXPORT int getNumUniformBlocks() const; + GLSLANG_EXPORT const TObjectReflection& getUniformBlock(int index) const; + GLSLANG_EXPORT int getNumPipeInputs() const; + GLSLANG_EXPORT const TObjectReflection& getPipeInput(int index) const; + GLSLANG_EXPORT int getNumPipeOutputs() const; + GLSLANG_EXPORT const TObjectReflection& getPipeOutput(int index) const; + GLSLANG_EXPORT int getNumBufferVariables() const; + GLSLANG_EXPORT const TObjectReflection& getBufferVariable(int index) const; + GLSLANG_EXPORT int getNumBufferBlocks() const; + GLSLANG_EXPORT const TObjectReflection& getBufferBlock(int index) const; + GLSLANG_EXPORT int getNumAtomicCounters() const; + GLSLANG_EXPORT const TObjectReflection& getAtomicCounter(int index) const; // Legacy Reflection Interface - expressed in terms of above interface @@ -873,22 +918,22 @@ public: // returns a TType* const TType *getAttributeTType(int index) const { return getPipeInput(index).getType(); } - void dumpReflection(); + GLSLANG_EXPORT void dumpReflection(); // I/O mapping: apply base offsets and map live unbound variables // If resolver is not provided it uses the previous approach // and respects auto assignment and offsets. - bool mapIO(TIoMapResolver* pResolver = nullptr, TIoMapper* pIoMapper = nullptr); -#endif + GLSLANG_EXPORT bool mapIO(TIoMapResolver* pResolver = nullptr, TIoMapper* pIoMapper = nullptr); +#endif // !GLSLANG_WEB && !GLSLANG_ANGLE protected: - bool linkStage(EShLanguage, EShMessages); + GLSLANG_EXPORT bool linkStage(EShLanguage, EShMessages); TPoolAllocator* pool; std::list stages[EShLangCount]; TIntermediate* intermediate[EShLangCount]; bool newedIntermediate[EShLangCount]; // track which intermediate were "new" versus reusing a singleton unit in a stage TInfoSink* infoSink; -#ifndef GLSLANG_WEB +#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE) TReflection* reflection; #endif bool linked; diff --git a/thirdparty/glslang/glslang/build_info.h b/thirdparty/glslang/glslang/build_info.h new file mode 100644 index 00000000000..319bfa5f736 --- /dev/null +++ b/thirdparty/glslang/glslang/build_info.h @@ -0,0 +1,62 @@ +// Copyright (C) 2020 The Khronos Group Inc. +// +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// +// Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// +// Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// +// Neither the name of The Khronos Group Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +// POSSIBILITY OF SUCH DAMAGE. + +#ifndef GLSLANG_BUILD_INFO +#define GLSLANG_BUILD_INFO + +#define GLSLANG_VERSION_MAJOR 11 +#define GLSLANG_VERSION_MINOR 0 +#define GLSLANG_VERSION_PATCH 0 +#define GLSLANG_VERSION_FLAVOR "" + +#define GLSLANG_VERSION_GREATER_THAN(major, minor, patch) \ + (((major) > GLSLANG_VERSION_MAJOR) || ((major) == GLSLANG_VERSION_MAJOR && \ + (((minor) > GLSLANG_VERSION_MINOR) || ((minor) == GLSLANG_VERSION_MINOR && \ + ((patch) > GLSLANG_VERSION_PATCH))))) + +#define GLSLANG_VERSION_GREATER_OR_EQUAL_TO(major, minor, patch) \ + (((major) > GLSLANG_VERSION_MAJOR) || ((major) == GLSLANG_VERSION_MAJOR && \ + (((minor) > GLSLANG_VERSION_MINOR) || ((minor) == GLSLANG_VERSION_MINOR && \ + ((patch) >= GLSLANG_VERSION_PATCH))))) + +#define GLSLANG_VERSION_LESS_THAN(major, minor, patch) \ + (((major) < GLSLANG_VERSION_MAJOR) || ((major) == GLSLANG_VERSION_MAJOR && \ + (((minor) < GLSLANG_VERSION_MINOR) || ((minor) == GLSLANG_VERSION_MINOR && \ + ((patch) < GLSLANG_VERSION_PATCH))))) + +#define GLSLANG_VERSION_LESS_OR_EQUAL_TO(major, minor, patch) \ + (((major) < GLSLANG_VERSION_MAJOR) || ((major) == GLSLANG_VERSION_MAJOR && \ + (((minor) < GLSLANG_VERSION_MINOR) || ((minor) == GLSLANG_VERSION_MINOR && \ + ((patch) <= GLSLANG_VERSION_PATCH))))) + +#endif // GLSLANG_BUILD_INFO diff --git a/thirdparty/vulkan/include/vulkan/vk_icd.h b/thirdparty/vulkan/include/vulkan/vk_icd.h index 5dff59a16ee..fde5bf6214b 100644 --- a/thirdparty/vulkan/include/vulkan/vk_icd.h +++ b/thirdparty/vulkan/include/vulkan/vk_icd.h @@ -41,17 +41,45 @@ // that if the loader is older, it should automatically fail a // call for any API version > 1.0. Otherwise, the loader will // manually determine if it can support the expected version. -#define CURRENT_LOADER_ICD_INTERFACE_VERSION 5 +// Version 6 - Add support for vk_icdEnumerateAdapterPhysicalDevices. +#define CURRENT_LOADER_ICD_INTERFACE_VERSION 6 #define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0 #define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4 -typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion); +// Old typedefs that don't follow a proper naming convention but are preserved for compatibility +typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion); // This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this // file directly, it won't be found. 
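The new build_info.h above provides compile-time version checks. Note that, as written, the macros compare the supplied version against the version baked into the headers, so "is glslang at least 11.0.0" is expressed with GLSLANG_VERSION_LESS_OR_EQUAL_TO(11, 0, 0). A hypothetical consumer-side sketch, assuming glslang's header directory is on the include path:

// Sketch only; not part of the patch.
#include <glslang/build_info.h>
#include <cstdio>

int main()
{
    std::printf("built against glslang %d.%d.%d%s\n",
                GLSLANG_VERSION_MAJOR, GLSLANG_VERSION_MINOR,
                GLSLANG_VERSION_PATCH, GLSLANG_VERSION_FLAVOR);

#if GLSLANG_VERSION_LESS_OR_EQUAL_TO(11, 0, 0)
    // 11.0.0 <= header version: the glslang::GetVersion() API added in this
    // update is available.
    std::printf("headers are 11.0.0 or newer\n");
#else
    std::printf("headers are older than 11.0.0\n");
#endif
    return 0;
}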
#ifndef PFN_GetPhysicalDeviceProcAddr typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char *pName); #endif +// Typedefs for loader/ICD interface +typedef VkResult (VKAPI_PTR *PFN_vk_icdNegotiateLoaderICDInterfaceVersion)(uint32_t* pVersion); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) +typedef VkResult (VKAPI_PTR *PFN_vk_icdEnumerateAdapterPhysicalDevices)(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif + +// Prototypes for loader/ICD interface +#if !defined(VK_NO_PROTOTYPES) +#ifdef __cplusplus +extern "C" { +#endif + VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance isntance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) + VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif +#ifdef __cplusplus +} +#endif +#endif + /* * The ICD must reserve space for a pointer for the loader's dispatch * table, at the start of . @@ -91,6 +119,8 @@ typedef enum { VK_ICD_WSI_PLATFORM_DISPLAY, VK_ICD_WSI_PLATFORM_HEADLESS, VK_ICD_WSI_PLATFORM_METAL, + VK_ICD_WSI_PLATFORM_DIRECTFB, + VK_ICD_WSI_PLATFORM_VI, } VkIcdWsiPlatform; typedef struct { @@ -137,6 +167,14 @@ typedef struct { } VkIcdSurfaceXlib; #endif // VK_USE_PLATFORM_XLIB_KHR +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +typedef struct { + VkIcdSurfaceBase base; + IDirectFB *dfb; + IDirectFBSurface *surface; +} VkIcdSurfaceDirectFB; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT + #ifdef VK_USE_PLATFORM_ANDROID_KHR typedef struct { VkIcdSurfaceBase base; @@ -180,4 +218,11 @@ typedef struct { } VkIcdSurfaceMetal; #endif // VK_USE_PLATFORM_METAL_EXT +#ifdef VK_USE_PLATFORM_VI_NN +typedef struct { + VkIcdSurfaceBase base; + void *window; +} VkIcdSurfaceVi; +#endif // VK_USE_PLATFORM_VI_NN + #endif // VKICD_H diff --git a/thirdparty/vulkan/include/vulkan/vk_layer.h b/thirdparty/vulkan/include/vulkan/vk_layer.h index fa765200897..0651870c70a 100644 --- a/thirdparty/vulkan/include/vulkan/vk_layer.h +++ b/thirdparty/vulkan/include/vulkan/vk_layer.h @@ -83,7 +83,8 @@ typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device); typedef enum VkLayerFunction_ { VK_LAYER_LINK_INFO = 0, VK_LOADER_DATA_CALLBACK = 1, - VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2 + VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2, + VK_LOADER_FEATURES = 3, } VkLayerFunction; typedef struct VkLayerInstanceLink_ { @@ -111,6 +112,12 @@ typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device, typedef VkResult (VKAPI_PTR *PFN_vkLayerCreateDevice)(VkInstance instance, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA); typedef void (VKAPI_PTR *PFN_vkLayerDestroyDevice)(VkDevice physicalDevice, const VkAllocationCallbacks *pAllocator, PFN_vkDestroyDevice destroyFunction); + +typedef enum VkLoaderFeastureFlagBits { 
+ VK_LOADER_FEATURE_PHYSICAL_DEVICE_SORTING = 0x00000001, +} VkLoaderFlagBits; +typedef VkFlags VkLoaderFeatureFlags; + typedef struct { VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO const void *pNext; @@ -119,9 +126,10 @@ typedef struct { VkLayerInstanceLink *pLayerInfo; PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData; struct { - PFN_vkLayerCreateDevice pfnLayerCreateDevice; - PFN_vkLayerDestroyDevice pfnLayerDestroyDevice; - } layerDevice; + PFN_vkLayerCreateDevice pfnLayerCreateDevice; + PFN_vkLayerDestroyDevice pfnLayerDestroyDevice; + } layerDevice; + VkLoaderFeatureFlags loaderFeatures; } u; } VkLayerInstanceCreateInfo; diff --git a/thirdparty/vulkan/include/vulkan/vk_platform.h b/thirdparty/vulkan/include/vulkan/vk_platform.h index 7289299240a..048322d93b7 100644 --- a/thirdparty/vulkan/include/vulkan/vk_platform.h +++ b/thirdparty/vulkan/include/vulkan/vk_platform.h @@ -2,19 +2,9 @@ // File: vk_platform.h // /* -** Copyright (c) 2014-2017 The Khronos Group Inc. +** Copyright (c) 2014-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ diff --git a/thirdparty/vulkan/include/vulkan/vulkan.h b/thirdparty/vulkan/include/vulkan/vulkan.h index 5f853f9fc8e..b7716ec8eac 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan.h +++ b/thirdparty/vulkan/include/vulkan/vulkan.h @@ -2,19 +2,9 @@ #define VULKAN_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ #include "vk_platform.h" @@ -71,6 +61,12 @@ #endif +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +#include +#include "vulkan_directfb.h" +#endif + + #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT #include #include @@ -83,4 +79,9 @@ #include "vulkan_ggp.h" #endif + +#ifdef VK_ENABLE_BETA_EXTENSIONS +#include "vulkan_beta.h" +#endif + #endif // VULKAN_H_ diff --git a/thirdparty/vulkan/include/vulkan/vulkan.hpp b/thirdparty/vulkan/include/vulkan/vulkan.hpp index 441869898fd..68b42afe199 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan.hpp +++ b/thirdparty/vulkan/include/vulkan/vulkan.hpp @@ -1,32 +1,6 @@ // Copyright (c) 2015-2020 The Khronos Group Inc. // -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ---- Exceptions to the Apache 2.0 License: ---- -// -// As an exception, if you use this Software to generate code and portions of -// this Software are embedded into the generated code as a result, you may -// redistribute such product without providing attribution as would otherwise -// be required by Sections 4(a), 4(b) and 4(d) of the License. -// -// In addition, if you combine or link code generated by this Software with -// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 -// ("`Combined Software`") and if a court of competent jurisdiction determines -// that the patent provision (Section 3), the indemnity provision (Section 9) -// or other Section of the License conflicts with the conditions of the -// applicable GPL or LGPL license, you may retroactively and prospectively -// choose to deem waived or otherwise exclude such Section(s) of the License, -// but only in their entirety and only with respect to the Combined Software. +// SPDX-License-Identifier: Apache-2.0 OR MIT // // This header is generated from the Khronos Vulkan XML API Registry. @@ -34,11 +8,30 @@ #ifndef VULKAN_HPP #define VULKAN_HPP +#if defined( _MSVC_LANG ) +# define VULKAN_HPP_CPLUSPLUS _MSVC_LANG +#else +# define VULKAN_HPP_CPLUSPLUS __cplusplus +#endif + +#if 201703L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 20 +#elif 201402L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 17 +#elif 201103L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 14 +#elif 199711L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 11 +#else +# error "vulkan.hpp needs at least c++ standard version 11" +#endif + #include #include #include #include #include +#include #include #include #include @@ -46,7 +39,15 @@ #include #include -#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#if 17 <= VULKAN_HPP_CPP_VERSION +#include +#endif + +#if defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +# if !defined(VULKAN_HPP_NO_SMART_HANDLE) +# define VULKAN_HPP_NO_SMART_HANDLE +# endif +#else # include # include #endif @@ -56,21 +57,39 @@ # define VULKAN_HPP_ASSERT assert #endif +#if !defined(VULKAN_HPP_ASSERT_ON_RESULT) +# define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT +#endif + #if !defined(VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL) # define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1 #endif #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1 -# if defined(__linux__) || defined(__APPLE__) -# include -# endif - -# if defined(_WIN32) -# include +# if defined( __linux__ ) || defined( __APPLE__ ) +# include +# elif defined( _WIN32 ) +typedef struct HINSTANCE__ * HINSTANCE; +# if defined( _WIN64 ) +typedef int64_t( __stdcall * FARPROC )(); +# else +typedef int( __stdcall * FARPROC )(); +# endif +extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName ); +extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule ); +extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName ); # endif #endif -static_assert( VK_HEADER_VERSION == 131 , "Wrong VK_HEADER_VERSION!" 
); +#if ( 201711 <= __cpp_impl_three_way_comparison ) && __has_include( ) +# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR +#endif +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) +# include +#endif + + +static_assert( VK_HEADER_VERSION == 154 , "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default. // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION @@ -158,6 +177,24 @@ static_assert( VK_HEADER_VERSION == 131 , "Wrong VK_HEADER_VERSION!" ); # endif #endif +#if 14 <= VULKAN_HPP_CPP_VERSION +# define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]] +#else +# define VULKAN_HPP_DEPRECATED( msg ) +#endif + +#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS ) +# define VULKAN_HPP_NODISCARD [[nodiscard]] +# if defined(VULKAN_HPP_NO_EXCEPTIONS) +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]] +# else +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS +# endif +#else +# define VULKAN_HPP_NODISCARD +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS +#endif + #if !defined(VULKAN_HPP_NAMESPACE) #define VULKAN_HPP_NAMESPACE vk #endif @@ -168,53 +205,116 @@ static_assert( VK_HEADER_VERSION == 131 , "Wrong VK_HEADER_VERSION!" ); namespace VULKAN_HPP_NAMESPACE { + #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template class ArrayProxy { public: - VULKAN_HPP_CONSTEXPR ArrayProxy(std::nullptr_t) VULKAN_HPP_NOEXCEPT - : m_count(0) - , m_ptr(nullptr) + VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) {} - ArrayProxy(typename std::remove_reference::type & ptr) VULKAN_HPP_NOEXCEPT - : m_count(1) - , m_ptr(&ptr) + VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) {} - ArrayProxy(uint32_t count, T * ptr) VULKAN_HPP_NOEXCEPT - : m_count(count) - , m_ptr(ptr) + ArrayProxy( T & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + template ::value, int>::type = 0> + ArrayProxy( typename std::remove_const::type & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + ArrayProxy( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + {} + + template ::value, int>::type = 0> + ArrayProxy( uint32_t count, typename std::remove_const::type * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + {} + + ArrayProxy( std::initializer_list const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + template ::value, int>::type = 0> + ArrayProxy( std::initializer_list::type> const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + ArrayProxy( std::initializer_list & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + template ::value, int>::type = 0> + ArrayProxy( std::initializer_list::type> & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} template - ArrayProxy(std::array::type, N> & data) VULKAN_HPP_NOEXCEPT - : m_count(N) - , m_ptr(data.data()) + ArrayProxy( std::array const & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template ::value, int>::type = 0> + ArrayProxy( std::array::type, N> const & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) {} template - ArrayProxy(std::array::type, N> const& data) VULKAN_HPP_NOEXCEPT - 
: m_count(N) - , m_ptr(data.data()) + ArrayProxy( std::array & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template ::value, int>::type = 0> + ArrayProxy( std::array::type, N> & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) {} template ::type>> - ArrayProxy(std::vector::type, Allocator> & data) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(data.size())) - , m_ptr(data.data()) + ArrayProxy( std::vector const & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxy( std::vector::type, Allocator> const & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) {} template ::type>> - ArrayProxy(std::vector::type, Allocator> const& data) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(data.size())) - , m_ptr(data.data()) + ArrayProxy( std::vector & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) {} - ArrayProxy(std::initializer_list::type> const& data) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(data.end() - data.begin())) - , m_ptr(data.begin()) + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxy( std::vector::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) {} const T * begin() const VULKAN_HPP_NOEXCEPT @@ -229,19 +329,19 @@ namespace VULKAN_HPP_NAMESPACE const T & front() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT(m_count && m_ptr); + VULKAN_HPP_ASSERT( m_count && m_ptr ); return *m_ptr; } const T & back() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT(m_count && m_ptr); - return *(m_ptr + m_count - 1); + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *( m_ptr + m_count - 1 ); } bool empty() const VULKAN_HPP_NOEXCEPT { - return (m_count == 0); + return ( m_count == 0 ); } uint32_t size() const VULKAN_HPP_NOEXCEPT @@ -255,20 +355,315 @@ namespace VULKAN_HPP_NAMESPACE } private: - uint32_t m_count; - T * m_ptr; + uint32_t m_count; + T * m_ptr; + }; + + template + class ArrayProxyNoTemporaries + { + public: + VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + {} + + VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( typename std::remove_const::type & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const::type * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + {} + + ArrayProxyNoTemporaries( std::initializer_list const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::initializer_list::type> const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + ArrayProxyNoTemporaries( std::initializer_list & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( 
std::initializer_list::type> & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + ArrayProxyNoTemporaries( std::initializer_list const && list ) VULKAN_HPP_NOEXCEPT = delete; + ArrayProxyNoTemporaries( std::initializer_list && list ) VULKAN_HPP_NOEXCEPT = delete; + + template + ArrayProxyNoTemporaries( std::array const & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::array::type, N> const & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template + ArrayProxyNoTemporaries( std::array & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::array::type, N> & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template + ArrayProxyNoTemporaries( std::array const && data ) VULKAN_HPP_NOEXCEPT = delete; + template + ArrayProxyNoTemporaries( std::array && data ) VULKAN_HPP_NOEXCEPT = delete; + + template ::type>> + ArrayProxyNoTemporaries( std::vector const & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxyNoTemporaries( std::vector::type, Allocator> const & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>> + ArrayProxyNoTemporaries( std::vector & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxyNoTemporaries( std::vector::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + ArrayProxyNoTemporaries( std::vector const && data ) VULKAN_HPP_NOEXCEPT = delete; + ArrayProxyNoTemporaries( std::vector && data ) VULKAN_HPP_NOEXCEPT = delete; + + const T * begin() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + const T * end() const VULKAN_HPP_NOEXCEPT + { + return m_ptr + m_count; + } + + const T & front() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *m_ptr; + } + + const T & back() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *( m_ptr + m_count - 1 ); + } + + bool empty() const VULKAN_HPP_NOEXCEPT + { + return ( m_count == 0 ); + } + + uint32_t size() const VULKAN_HPP_NOEXCEPT + { + return m_count; + } + + T * data() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + private: + uint32_t m_count; + T * m_ptr; }; #endif + template + class ArrayWrapper1D : public std::array + { + public: + VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT + : std::array() + {} + + VULKAN_HPP_CONSTEXPR ArrayWrapper1D(std::array const& data) VULKAN_HPP_NOEXCEPT + : std::array(data) + {} + +#if defined(_WIN32) && !defined(_WIN64) + VULKAN_HPP_CONSTEXPR T const& operator[](int index) const VULKAN_HPP_NOEXCEPT + { + return std::array::operator[](index); + } + + VULKAN_HPP_CONSTEXPR T & operator[](int index) VULKAN_HPP_NOEXCEPT + { + return std::array::operator[](index); + } +#endif + + operator T const* () const VULKAN_HPP_NOEXCEPT + { + return this->data(); + } + + operator T * () VULKAN_HPP_NOEXCEPT + { + return this->data(); + } + + template ::value, int>::type = 0> + operator std::string() const + { + return 
std::string( this->data() ); + } + +#if 17 <= VULKAN_HPP_CPP_VERSION + template ::value, int>::type = 0> + operator std::string_view() const + { + return std::string_view( this->data() ); + } +#endif + + template ::value, int>::type = 0> + bool operator<( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) < *static_cast const *>( &rhs ); + } + + template ::value, int>::type = 0> + bool operator<=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) <= *static_cast const *>( &rhs ); + } + + template ::value, int>::type = 0> + bool operator>( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) > *static_cast const *>( &rhs ); + } + + template ::value, int>::type = 0> + bool operator>=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) >= *static_cast const *>( &rhs ); + } + + template ::value, int>::type = 0> + bool operator==( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) == *static_cast const *>( &rhs ); + } + + template ::value, int>::type = 0> + bool operator!=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) != *static_cast const *>( &rhs ); + } + }; + + // specialization of relational operators between std::string and arrays of chars + template + bool operator<(std::string const& lhs, ArrayWrapper1D const& rhs) VULKAN_HPP_NOEXCEPT + { + return lhs < rhs.data(); + } + + template + bool operator<=(std::string const& lhs, ArrayWrapper1D const& rhs) VULKAN_HPP_NOEXCEPT + { + return lhs <= rhs.data(); + } + + template + bool operator>(std::string const& lhs, ArrayWrapper1D const& rhs) VULKAN_HPP_NOEXCEPT + { + return lhs > rhs.data(); + } + + template + bool operator>=(std::string const& lhs, ArrayWrapper1D const& rhs) VULKAN_HPP_NOEXCEPT + { + return lhs >= rhs.data(); + } + + template + bool operator==(std::string const& lhs, ArrayWrapper1D const& rhs) VULKAN_HPP_NOEXCEPT + { + return lhs == rhs.data(); + } + + template + bool operator!=(std::string const& lhs, ArrayWrapper1D const& rhs) VULKAN_HPP_NOEXCEPT + { + return lhs != rhs.data(); + } + + template + class ArrayWrapper2D : public std::array,N> + { + public: + VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT + : std::array, N>() + {} + + VULKAN_HPP_CONSTEXPR ArrayWrapper2D(std::array,N> const& data) VULKAN_HPP_NOEXCEPT + : std::array, N>(*reinterpret_cast,N> const*>(&data)) + {} + }; + template struct FlagTraits { enum { allFlags = 0 }; }; - template + template class Flags { public: + using MaskType = typename std::underlying_type::type; + // constructors VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT : m_mask(0) @@ -278,7 +673,7 @@ namespace VULKAN_HPP_NAMESPACE : m_mask(static_cast(bit)) {} - VULKAN_HPP_CONSTEXPR Flags(Flags const& rhs) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Flags(Flags const& rhs) VULKAN_HPP_NOEXCEPT : m_mask(rhs.m_mask) {} @@ -287,35 +682,39 @@ namespace VULKAN_HPP_NAMESPACE {} // relational operators - VULKAN_HPP_CONSTEXPR bool operator<(Flags const& rhs) const VULKAN_HPP_NOEXCEPT +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>(Flags const&) const = default; +#else + VULKAN_HPP_CONSTEXPR bool operator<(Flags const& rhs) const VULKAN_HPP_NOEXCEPT { return m_mask < rhs.m_mask; } - VULKAN_HPP_CONSTEXPR bool operator<=(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool 
operator<=(Flags const& rhs) const VULKAN_HPP_NOEXCEPT { return m_mask <= rhs.m_mask; } - VULKAN_HPP_CONSTEXPR bool operator>(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator>(Flags const& rhs) const VULKAN_HPP_NOEXCEPT { return m_mask > rhs.m_mask; } - VULKAN_HPP_CONSTEXPR bool operator>=(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator>=(Flags const& rhs) const VULKAN_HPP_NOEXCEPT { return m_mask >= rhs.m_mask; } - VULKAN_HPP_CONSTEXPR bool operator==(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator==(Flags const& rhs) const VULKAN_HPP_NOEXCEPT { return m_mask == rhs.m_mask; } - VULKAN_HPP_CONSTEXPR bool operator!=(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR bool operator!=(Flags const& rhs) const VULKAN_HPP_NOEXCEPT { return m_mask != rhs.m_mask; } +#endif // logical operator VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT @@ -324,46 +723,46 @@ namespace VULKAN_HPP_NAMESPACE } // bitwise operators - VULKAN_HPP_CONSTEXPR Flags operator&(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Flags operator&(Flags const& rhs) const VULKAN_HPP_NOEXCEPT { - return Flags(m_mask & rhs.m_mask); + return Flags(m_mask & rhs.m_mask); } - VULKAN_HPP_CONSTEXPR Flags operator|(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Flags operator|(Flags const& rhs) const VULKAN_HPP_NOEXCEPT { - return Flags(m_mask | rhs.m_mask); + return Flags(m_mask | rhs.m_mask); } - VULKAN_HPP_CONSTEXPR Flags operator^(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Flags operator^(Flags const& rhs) const VULKAN_HPP_NOEXCEPT { - return Flags(m_mask ^ rhs.m_mask); + return Flags(m_mask ^ rhs.m_mask); } - VULKAN_HPP_CONSTEXPR Flags operator~() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Flags operator~() const VULKAN_HPP_NOEXCEPT { - return Flags(m_mask ^ FlagTraits::allFlags); + return Flags(m_mask ^ FlagTraits::allFlags); } // assignment operators - Flags & operator=(Flags const& rhs) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 Flags & operator=(Flags const& rhs) VULKAN_HPP_NOEXCEPT { m_mask = rhs.m_mask; return *this; } - Flags & operator|=(Flags const& rhs) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 Flags & operator|=(Flags const& rhs) VULKAN_HPP_NOEXCEPT { m_mask |= rhs.m_mask; return *this; } - Flags & operator&=(Flags const& rhs) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 Flags & operator&=(Flags const& rhs) VULKAN_HPP_NOEXCEPT { m_mask &= rhs.m_mask; return *this; } - Flags & operator^=(Flags const& rhs) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 Flags & operator^=(Flags const& rhs) VULKAN_HPP_NOEXCEPT { m_mask ^= rhs.m_mask; return *this; @@ -384,58 +783,60 @@ namespace VULKAN_HPP_NAMESPACE MaskType m_mask; }; - // relational operators - template - VULKAN_HPP_CONSTEXPR bool operator<(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + // relational operators only needed for pre C++20 + template + VULKAN_HPP_CONSTEXPR bool operator<(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT { return flags > bit; } - template - VULKAN_HPP_CONSTEXPR bool operator<=(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_CONSTEXPR bool operator<=(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT { return flags >= bit; } - template - VULKAN_HPP_CONSTEXPR bool operator>(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_CONSTEXPR 
bool operator>(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT { return flags < bit; } - template - VULKAN_HPP_CONSTEXPR bool operator>=(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_CONSTEXPR bool operator>=(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT { return flags <= bit; } - template - VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT { return flags == bit; } - template - VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT { return flags != bit; } +#endif // bitwise operators - template - VULKAN_HPP_CONSTEXPR Flags operator&(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_CONSTEXPR Flags operator&(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT { return flags & bit; } - template - VULKAN_HPP_CONSTEXPR Flags operator|(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_CONSTEXPR Flags operator|(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT { return flags | bit; } - template - VULKAN_HPP_CONSTEXPR Flags operator^(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_CONSTEXPR Flags operator^(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT { return flags ^ bit; } @@ -456,181 +857,227 @@ namespace VULKAN_HPP_NAMESPACE RefType *m_ptr; }; - template struct isStructureChainValid { enum { value = false }; }; - - template - struct TypeList - { - using list = P; - using last = T; - }; - - template - struct extendCheck - { - static const bool valid = isStructureChainValid::value || extendCheck::valid; - }; - - template - struct extendCheck,X> - { - static const bool valid = isStructureChainValid::value; - }; - - template - struct extendCheck - { - static const bool valid = true; - }; + template struct StructExtends { enum { value = false }; }; template - struct isPartOfStructureChain + struct IsPartOfStructureChain { static const bool valid = false; }; template - struct isPartOfStructureChain + struct IsPartOfStructureChain { - static const bool valid = std::is_same::value || isPartOfStructureChain::valid; + static const bool valid = std::is_same::value || IsPartOfStructureChain::valid; }; - template - class StructureChainElement + template + struct StructureChainContains { - public: - explicit operator Element&() VULKAN_HPP_NOEXCEPT { return value; } - explicit operator const Element&() const VULKAN_HPP_NOEXCEPT { return value; } - private: - Element value; + static const bool value = std::is_same>::type>::value || + StructureChainContains::value; }; - template - class StructureChain : private StructureChainElement... 
+ template + struct StructureChainContains<0, T, ChainElements...> + { + static const bool value = std::is_same>::type>::value; + }; + + template + struct StructureChainValidation + { + using TestType = typename std::tuple_element>::type; + static const bool valid = + StructExtends>::type>::value && + ( TestType::allowDuplicate || !StructureChainContains::value ) && + StructureChainValidation::valid; + }; + + template + struct StructureChainValidation<0, ChainElements...> + { + static const bool valid = true; + }; + + template + class StructureChain : public std::tuple { public: StructureChain() VULKAN_HPP_NOEXCEPT { - link(); + static_assert( StructureChainValidation::valid, + "The structure chain is not valid!" ); + link(); } - StructureChain(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT + StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple( rhs ) { - linkAndCopy(rhs); + static_assert( StructureChainValidation::valid, + "The structure chain is not valid!" ); + link(); } - StructureChain(StructureElements const &... elems) VULKAN_HPP_NOEXCEPT + StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT + : std::tuple( std::forward>( rhs ) ) { - linkAndCopyElements(elems...); + static_assert( StructureChainValidation::valid, + "The structure chain is not valid!" ); + link(); } - StructureChain& operator=(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT + StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple( elems... ) { - linkAndCopy(rhs); + static_assert( StructureChainValidation::valid, + "The structure chain is not valid!" ); + link(); + } + + StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT + { + std::tuple::operator=( rhs ); + link(); return *this; } - template ClassType& get() VULKAN_HPP_NOEXCEPT { return static_cast(*this);} + StructureChain & operator=( StructureChain && rhs ) = delete; - template - std::tuple get() + template + T & get() VULKAN_HPP_NOEXCEPT { - return std::tuple_cat( - std::make_tuple(get(),get()), - std::make_tuple(get()...) - ); + return std::get::value>( *this ); } - template - void unlink() VULKAN_HPP_NOEXCEPT + template + T const & get() const VULKAN_HPP_NOEXCEPT { - static_assert(isPartOfStructureChain::valid, "Can't unlink Structure that's not part of this StructureChain!"); - static_assert(!std::is_same>::type>::value, "It's not allowed to unlink the first element!"); - VkBaseOutStructure * ptr = reinterpret_cast(&get()); - assert(ptr != nullptr); - VkBaseOutStructure ** ppNext = &(reinterpret_cast(this)->pNext); - assert(*ppNext != nullptr); - while (*ppNext != ptr) - { - ppNext = &(*ppNext)->pNext; - assert(*ppNext != nullptr); // fires, if the ClassType member has already been unlinked ! - } - assert(*ppNext == ptr); - *ppNext = (*ppNext)->pNext; + return std::get::value>( *this ); } - template + template + std::tuple get() VULKAN_HPP_NOEXCEPT + { + return std::tie( get(), get(), get()... ); + } + + template + std::tuple get() const VULKAN_HPP_NOEXCEPT + { + return std::tie( get(), get(), get()... 
); + } + + template void relink() VULKAN_HPP_NOEXCEPT { - static_assert(isPartOfStructureChain::valid, "Can't relink Structure that's not part of this StructureChain!"); - static_assert(!std::is_same>::type>::value, "It's not allowed to have the first element unlinked!"); - VkBaseOutStructure * ptr = reinterpret_cast(&get()); - assert(ptr != nullptr); - VkBaseOutStructure ** ppNext = &(reinterpret_cast(this)->pNext); - assert(*ppNext != nullptr); -#if !defined(NDEBUG) - while (*ppNext) - { - assert(*ppNext != ptr); // fires, if the ClassType member has not been unlinked before - ppNext = &(*ppNext)->pNext; - } - ppNext = &(reinterpret_cast(this)->pNext); -#endif - ptr->pNext = *ppNext; - *ppNext = ptr; + static_assert( IsPartOfStructureChain::valid, + "Can't relink Structure that's not part of this StructureChain!" ); + static_assert( + !std::is_same>::type>::value || (Which != 0), + "It's not allowed to have the first element unlinked!" ); + + auto pNext = reinterpret_cast( &get() ); + VULKAN_HPP_ASSERT( !isLinked( pNext ) ); + auto & headElement = std::get<0>( *this ); + pNext->pNext = reinterpret_cast(headElement.pNext); + headElement.pNext = pNext; + } + + template + void unlink() VULKAN_HPP_NOEXCEPT + { + static_assert( IsPartOfStructureChain::valid, + "Can't unlink Structure that's not part of this StructureChain!" ); + static_assert( + !std::is_same>::type>::value || (Which != 0), + "It's not allowed to unlink the first element!" ); + + unlink( reinterpret_cast( &get() ) ); } private: - template - void link() VULKAN_HPP_NOEXCEPT + template + struct ChainElementIndex : ChainElementIndex + {}; + + template + struct ChainElementIndex::value, void>::type, + First, + Types...> : ChainElementIndex + {}; + + template + struct ChainElementIndex::value, void>::type, + First, + Types...> : ChainElementIndex + {}; + + template + struct ChainElementIndex::value, void>::type, + First, + Types...> : std::integral_constant + {}; + + bool isLinked( VkBaseInStructure const * pNext ) { - static_assert(extendCheck::valid, "The structure chain is not valid!"); + VkBaseInStructure const * elementPtr = reinterpret_cast(&std::get<0>( *this )); + while ( elementPtr ) + { + if ( elementPtr->pNext == pNext ) + { + return true; + } + elementPtr = elementPtr->pNext; + } + return false; } - template - void link() VULKAN_HPP_NOEXCEPT + template + typename std::enable_if::type link() VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - X& x = static_cast(*this); - Y& y = static_cast(*this); - x.pNext = &y; - link, Y, Z...>(); + auto & x = std::get( *this ); + x.pNext = &std::get( *this ); + link(); } - template - void linkAndCopy(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT + template + typename std::enable_if::type link() VULKAN_HPP_NOEXCEPT + {} + + template + typename std::enable_if::type unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - static_cast(*this) = static_cast(rhs); + auto & element = std::get( *this ); + if ( element.pNext == pNext ) + { + element.pNext = pNext->pNext; + } + else + { + unlink( pNext ); + } } - template - void linkAndCopy(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT + template + typename std::enable_if::type unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - X& x = static_cast(*this); - Y& y = static_cast(*this); - x = static_cast(rhs); - x.pNext = &y; - 
linkAndCopy, Y, Z...>(rhs); - } - - template - void linkAndCopyElements(X const &xelem) VULKAN_HPP_NOEXCEPT - { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - static_cast(*this) = xelem; - } - - template - void linkAndCopyElements(X const &xelem, Y const &yelem, Z const &... zelem) VULKAN_HPP_NOEXCEPT - { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - X& x = static_cast(*this); - Y& y = static_cast(*this); - x = xelem; - x.pNext = &y; - linkAndCopyElements, Y, Z...>(yelem, zelem...); + auto & element = std::get<0>( *this ); + if ( element.pNext == pNext ) + { + element.pNext = pNext->pNext; + } + else + { + VULKAN_HPP_ASSERT( false ); // fires, if the ClassType member has already been unlinked ! + } } }; @@ -757,24 +1204,53 @@ namespace VULKAN_HPP_NAMESPACE class DispatchLoaderStatic { public: - VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const VULKAN_HPP_NOEXCEPT +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); + return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); } - VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT { - return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); + return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); } - VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration ) const VULKAN_HPP_NOEXCEPT { - return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); + return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); } - VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const VULKAN_HPP_NOEXCEPT + VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkEnumerateInstanceVersion( pApiVersion ); + return ::vkAcquireProfilingLockKHR( device, pInfo ); + } + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT + VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); + } +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); + } + + VkResult vkAllocateDescriptorSets( 
VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); + } + + VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); } VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) const VULKAN_HPP_NOEXCEPT @@ -782,6 +1258,55 @@ namespace VULKAN_HPP_NAMESPACE return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkBindAccelerationStructureMemoryKHR( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindAccelerationStructureMemoryKHR( device, bindInfoCount, pBindInfos ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); + } + + VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory( device, image, memory, memoryOffset ); + } + + VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); + } + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkBuildAccelerationStructureKHR( VkDevice device, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildAccelerationStructureKHR( device, infoCount, pInfos, ppOffsetInfos ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); @@ -837,6 +1362,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); } + void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, 
VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); + } + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); @@ -852,12 +1382,36 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); } + void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } + void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); } - void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); + } + +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdBuildAccelerationStructureIndirectKHR( VkCommandBuffer commandBuffer, const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, VkBuffer indirectBuffer, VkDeviceSize indirectOffset, uint32_t indirectStride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureIndirectKHR( commandBuffer, pInfo, indirectBuffer, indirectOffset, indirectStride ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdBuildAccelerationStructureKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureKHR( commandBuffer, infoCount, pInfos, ppOffsetInfos ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); } @@ -877,31 +1431,72 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); } - void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, 
VkCopyAccelerationStructureModeNV mode ) const VULKAN_HPP_NOEXCEPT +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); } + void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); + } + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); } + void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); + } + void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } + void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); + } + void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); } + void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); + } + +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t 
firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); @@ -1057,6 +1652,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); } + void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } + void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); @@ -1087,9 +1687,9 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers ); } - void vkCmdProcessCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdProcessCommandsNVX( commandBuffer, pProcessCommandsInfo ); + return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); } void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const VULKAN_HPP_NOEXCEPT @@ -1107,11 +1707,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); } - void vkCmdReserveSpaceForCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdReserveSpaceForCommandsNVX( commandBuffer, pReserveSpaceInfo ); - } - void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdResetEvent( commandBuffer, event, stageMask ); @@ -1127,6 +1722,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } + void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); + } + void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); @@ -1142,6 +1742,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); } + void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); + } + void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float 
depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); @@ -1152,6 +1757,26 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); } + void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); + } + + void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); + } + + void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); + } + + void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); + } + void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); @@ -1177,6 +1802,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); } + void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); + } + void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); @@ -1202,6 +1832,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); } + void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); + } + void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); @@ -1212,16 +1847,31 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); } + void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); + } + void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); } + void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); + } + void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetStencilReference( 
commandBuffer, faceMask, reference ); } + void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); + } + void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); @@ -1242,6 +1892,25 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); } + void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); + } + +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirectKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, buffer, offset ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth ); @@ -1257,7 +1926,14 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers ); } - void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool 
queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesKHR( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); } @@ -1272,103 +1948,51 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); } - VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEndCommandBuffer( commandBuffer ); - } - - VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetCommandBuffer( commandBuffer, flags ); - } - -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); - } - - VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); - } - - VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); - } - - VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireProfilingLockKHR( device, pInfo ); - } - - VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); - } - - VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); - } - - VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); - } - 
- VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); - } - - VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); - } - - VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); - } - - VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); - } - - VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory( device, image, memory, memoryOffset ); - } - - VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); - } - - VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); - } - VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT { return ::vkCompileDeferredNV( device, pipeline, shader ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCopyAccelerationStructureKHR( VkDevice device, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureKHR( device, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureToMemoryKHR( device, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToAccelerationStructureKHR( device, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); } +#ifdef VK_USE_PLATFORM_ANDROID_KHR + VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* 
pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); @@ -1389,6 +2013,23 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } + VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); + } + + VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); + } + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCreateDeferredOperationKHR( VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); @@ -1409,6 +2050,28 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); } + VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); + } + +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); + } + + VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const VULKAN_HPP_NOEXCEPT { return 
::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); @@ -1429,26 +2092,59 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } + VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } + +#ifdef VK_USE_PLATFORM_IOS_MVK + VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); } +#ifdef VK_USE_PLATFORM_FUCHSIA + VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); } - VkResult vkCreateIndirectCommandsLayoutNVX( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateIndirectCommandsLayoutNVX( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); } - VkResult vkCreateObjectTableNVX( VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateObjectTableNVX( device, pCreateInfo, pAllocator, pObjectTable ); + return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); } +#ifdef VK_USE_PLATFORM_MACOS_MVK + VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + +#ifdef VK_USE_PLATFORM_METAL_EXT + VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const VULKAN_HPP_NOEXCEPT { return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); @@ -1459,11 +2155,23 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); } + VkResult vkCreatePrivateDataSlotEXT( VkDevice device, const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } + VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesKHR( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); @@ -1514,6 +2222,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); } +#ifdef VK_USE_PLATFORM_GGP + VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); + } +#endif /*VK_USE_PLATFORM_GGP*/ + VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); @@ -1524,6 +2239,41 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); } +#ifdef VK_USE_PLATFORM_VI_NN + VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); @@ -1534,7 +2284,26 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); } - void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); + } + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeferredOperationJoinKHR( device, operation ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkDestroyAccelerationStructureKHR( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); } @@ -1554,6 +2323,23 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyCommandPool( device, commandPool, pAllocator ); } + void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); + } + + void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, 
pAllocator ); + } + +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); @@ -1604,14 +2390,14 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyImageView( device, imageView, pAllocator ); } - void vkDestroyIndirectCommandsLayoutNVX( VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyIndirectCommandsLayoutNV( VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyIndirectCommandsLayoutNVX( device, indirectCommandsLayout, pAllocator ); + return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); } - void vkDestroyObjectTableNVX( VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyObjectTableNVX( device, objectTable, pAllocator ); + return ::vkDestroyInstance( instance, pAllocator ); } void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -1629,6 +2415,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); } + void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); + } + void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyQueryPool( device, queryPool, pAllocator ); @@ -1664,6 +2455,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); } + void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); + } + void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); @@ -1684,6 +2480,56 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); } + VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEndCommandBuffer( commandBuffer ); + } + + VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, 
VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); + } + + VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceVersion( pApiVersion ); + } + + VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } + + VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } + + VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); + } + + VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); + } + VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT { return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); @@ -1704,11 +2550,25 @@ namespace VULKAN_HPP_NAMESPACE return ::vkFreeMemory( device, memory, pAllocator ); } - VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + void vkGetAccelerationStructureMemoryRequirementsKHR( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetAccelerationStructureMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); @@ -1766,6 +2626,20 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationResultKHR( device, operation ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); @@ -1776,6 +2650,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); } +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, const VkAccelerationStructureVersionKHR* version ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, version ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); @@ -1833,6 +2714,31 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); } + VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); + } + + VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); + } + + VkResult 
vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); + } + VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT { return ::vkGetEventStatus( device, event ); @@ -1855,6 +2761,11 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_WIN32_KHR*/ + void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); @@ -1895,11 +2806,21 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); } + VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); + } + uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX* pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageViewHandleNVX( device, pInfo ); } + PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetInstanceProcAddr( instance, pName ); + } + #ifdef VK_USE_PLATFORM_ANDROID_KHR VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer ) const VULKAN_HPP_NOEXCEPT { @@ -1953,498 +2874,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); } - VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); - } - - VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); - } - - VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); - } - - VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); - } - - VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, 
size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); - } - - VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } - - VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); - } - - void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); - } - - VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); - } - - VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); - } - - VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); - } - -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); - } - - VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); - } - - VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); - } - - VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainStatusKHR( device, swapchain ); - } - - VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); - } - - VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); - } - -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) 
const VULKAN_HPP_NOEXCEPT - { - return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); - } - -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); - } - - VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); - } - - VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMapMemory( device, memory, offset, size, flags, ppData ); - } - - VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); - } - - VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); - } - - VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); - } - - VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); - } - - VkResult vkRegisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterObjectsNVX( device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices ); - } - -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); - } - - void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseProfilingLockKHR( device ); - } - - VkResult 
vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetCommandPool( device, commandPool, flags ); - } - - VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetDescriptorPool( device, descriptorPool, flags ); - } - - VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetEvent( device, event ); - } - - VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetFences( device, fenceCount, pFences ); - } - - void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); - } - - void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); - } - - VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); - } - - VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); - } - - VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetEvent( device, event ); - } - - void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); - } - - void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); - } - - VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSignalSemaphore( device, pSignalInfo ); - } - - VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSignalSemaphoreKHR( device, pSignalInfo ); - } - - void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTrimCommandPool( device, commandPool, flags ); - } - - void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTrimCommandPoolKHR( device, commandPool, flags ); - } - - void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUninitializePerformanceApiINTEL( device ); - } - - void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUnmapMemory( device, memory ); - } - - VkResult vkUnregisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUnregisterObjectsNVX( device, objectTable, objectCount, pObjectEntryTypes, 
pObjectIndices ); - } - - void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); - } - - void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); - } - - void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); - } - - VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); - } - - VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitSemaphores( device, pWaitInfo, timeout ); - } - - VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); - } - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); - } - - VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); - } - - VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } - - VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } - -#ifdef VK_USE_PLATFORM_IOS_MVK - VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIOSSurfaceMVK( instance, 
pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - -#ifdef VK_USE_PLATFORM_FUCHSIA - VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#ifdef VK_USE_PLATFORM_MACOS_MVK - VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - -#ifdef VK_USE_PLATFORM_METAL_EXT - VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - -#ifdef VK_USE_PLATFORM_GGP - VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_GGP*/ - -#ifdef VK_USE_PLATFORM_VI_NN - VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_VI_NN*/ - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_XCB_KHR - VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#ifdef VK_USE_PLATFORM_XLIB_KHR - VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - - void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, 
const char* pMessage ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); - } - - void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); - } - - void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); - } - - void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyInstance( instance, pAllocator ); - } - - void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); - } - - VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); - } - - VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); - } - - VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); - } - - PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetInstanceProcAddr( instance, pName ); - } - - void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); - } - -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); - } -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - - VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); - } - - VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); - } - - VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, 
VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); - } - - VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); - } - - VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); - } - - VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); - } - - VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); - } - - VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); - } - - VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); - } - VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); @@ -2455,6 +2884,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); } +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); + } +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); @@ -2540,11 +2976,6 @@ namespace VULKAN_HPP_NAMESPACE 
return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); } - void vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( physicalDevice, pFeatures, pLimits ); - } - VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); @@ -2715,6 +3146,41 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_XLIB_KHR*/ + VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); + } + + VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); + } + + VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); + } + + VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); + } + + void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); + } + + VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); + } + + void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); + } + #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const VULKAN_HPP_NOEXCEPT { @@ -2722,14 +3188,129 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult 
vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT { - return ::vkReleaseDisplayEXT( physicalDevice, display ); + return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); } - void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); + return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); + } + + void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); + } + + VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); + } + + VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); + } + + VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); + } + +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); + } + + VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); + } + + VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, 
pSwapchainImages ); + } + + VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainStatusKHR( device, swapchain ); + } + + VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); + } + + VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); + } + +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); + } + +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); + } + + VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } + + VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory( device, memory, offset, size, flags, ppData ); + } + + VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); } void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT @@ -2771,6 +3352,175 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkQueueWaitIdle( queue ); } + + VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); + } + + VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, 
pAllocator, pFence ); + } + + VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseDisplayEXT( physicalDevice, display ); + } + +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); + } + + void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseProfilingLockKHR( device ); + } + + VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandBuffer( commandBuffer, flags ); + } + + VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandPool( device, commandPool, flags ); + } + + VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetDescriptorPool( device, descriptorPool, flags ); + } + + VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetEvent( device, event ); + } + + VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetFences( device, fenceCount, pFences ); + } + + void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); + } + + void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); + } + + VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); + } + + VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); + } + + VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetEvent( device, event ); + } + + void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); + } + + void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); + } + + VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); + } + + VkResult vkSignalSemaphore( VkDevice device, 
const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphore( device, pSignalInfo ); + } + + VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphoreKHR( device, pSignalInfo ); + } + + void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); + } + + void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPool( device, commandPool, flags ); + } + + void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPoolKHR( device, commandPool, flags ); + } + + void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUninitializePerformanceApiINTEL( device ); + } + + void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory( device, memory ); + } + + void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); + } + + VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); + } + + VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphores( device, pWaitInfo, timeout ); + } + + VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); + } + +#ifdef VK_ENABLE_BETA_EXTENSIONS + VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ }; #endif @@ -2783,11 +3533,21 @@ namespace VULKAN_HPP_NAMESPACE # 
endif #endif +#if defined(_WIN32) && defined(VULKAN_HPP_STORAGE_SHARED) +# ifdef VULKAN_HPP_STORAGE_SHARED_EXPORT +# define VULKAN_HPP_STORAGE_API __declspec( dllexport ) +# else +# define VULKAN_HPP_STORAGE_API __declspec( dllimport ) +# endif +#else +# define VULKAN_HPP_STORAGE_API +#endif + #if !defined(VULKAN_HPP_DEFAULT_DISPATCHER) # if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 # define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic -# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE namespace VULKAN_HPP_NAMESPACE { DispatchLoaderDynamic defaultDispatchLoaderDynamic; } - extern DispatchLoaderDynamic defaultDispatchLoaderDynamic; +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; } + extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; # else # define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic() # define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE @@ -2800,6 +3560,16 @@ namespace VULKAN_HPP_NAMESPACE # else # define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic # endif +#endif + +#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER ) +# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT +# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT +# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT +#else +# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT = {} +# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr +# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER #endif struct AllocationCallbacks; @@ -2808,17 +3578,16 @@ namespace VULKAN_HPP_NAMESPACE class ObjectDestroy { public: - ObjectDestroy() - : m_owner() - , m_allocationCallbacks( nullptr ) - , m_dispatch( nullptr ) - {} + ObjectDestroy() = default; - ObjectDestroy( OwnerType owner, Optional allocationCallbacks = nullptr, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - {} + ObjectDestroy( OwnerType owner, + Optional allocationCallbacks + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + {} OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; } Optional getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; } @@ -2827,14 +3596,14 @@ namespace VULKAN_HPP_NAMESPACE template void destroy(T t) VULKAN_HPP_NOEXCEPT { - assert( m_owner && m_dispatch ); + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); m_owner.destroy( t, m_allocationCallbacks, *m_dispatch ); } private: - OwnerType m_owner; - Optional m_allocationCallbacks; - Dispatch const* m_dispatch; + OwnerType m_owner = {}; + Optional m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; }; class NoParent; @@ -2843,15 +3612,13 @@ namespace VULKAN_HPP_NAMESPACE class ObjectDestroy { public: - ObjectDestroy() - : m_allocationCallbacks( nullptr ) - , m_dispatch( nullptr ) - {} + ObjectDestroy() = default; - ObjectDestroy( Optional allocationCallbacks, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT - : m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - {} + ObjectDestroy( Optional 
allocationCallbacks, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + {} Optional getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; } @@ -2859,53 +3626,62 @@ namespace VULKAN_HPP_NAMESPACE template void destroy(T t) VULKAN_HPP_NOEXCEPT { - assert( m_dispatch ); + VULKAN_HPP_ASSERT( m_dispatch ); t.destroy( m_allocationCallbacks, *m_dispatch ); } private: - Optional m_allocationCallbacks; - Dispatch const* m_dispatch; + Optional m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; }; template class ObjectFree { - public: - ObjectFree() - : m_owner() - , m_allocationCallbacks( nullptr ) - , m_dispatch( nullptr ) - {} + public: + ObjectFree() = default; - ObjectFree( OwnerType owner, Optional allocationCallbacks, Dispatch const &dispatch ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - {} + ObjectFree( OwnerType owner, + Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + {} - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; } - Optional getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; } + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } - protected: - template - void destroy(T t) VULKAN_HPP_NOEXCEPT - { - assert( m_owner && m_dispatch ); - m_owner.free( t, m_allocationCallbacks, *m_dispatch ); - } + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } - private: - OwnerType m_owner; - Optional m_allocationCallbacks; - Dispatch const* m_dispatch; + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.free( t, m_allocationCallbacks, *m_dispatch ); + } + + private: + OwnerType m_owner = {}; + Optional m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; }; template class PoolFree { public: - PoolFree( OwnerType owner = OwnerType(), PoolType pool = PoolType(), Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + PoolFree() = default; + + PoolFree( OwnerType owner, + PoolType pool, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT : m_owner( owner ) , m_pool( pool ) , m_dispatch( &dispatch ) @@ -2922,55 +3698,9 @@ namespace VULKAN_HPP_NAMESPACE } private: - OwnerType m_owner; - PoolType m_pool; - Dispatch const* m_dispatch; - }; - - template - class ConstExpression1DArrayCopy - { - public: - VULKAN_HPP_CONSTEXPR_14 static void copy(T dst[N], std::array const& src) VULKAN_HPP_NOEXCEPT - { - dst[I-1] = src[I-1]; - ConstExpression1DArrayCopy::copy(dst, src); - } - }; - - template - class ConstExpression1DArrayCopy - { - public: - VULKAN_HPP_CONSTEXPR_14 static void copy(T /*dst*/[N], std::array const& /*src*/) VULKAN_HPP_NOEXCEPT {} - }; - - template - class ConstExpression2DArrayCopy - { - public: - VULKAN_HPP_CONSTEXPR_14 static void copy(T dst[N][M], std::array, N> const& src) VULKAN_HPP_NOEXCEPT - { - dst[I - 1][J - 1] = src[I - 1][J - 1]; - ConstExpression2DArrayCopy::copy(dst, src); - } - }; - - template - class ConstExpression2DArrayCopy - { - public: - VULKAN_HPP_CONSTEXPR_14 static void copy(T dst[N][M], 
std::array, N> const& src) VULKAN_HPP_NOEXCEPT - { - ConstExpression2DArrayCopy::copy(dst, src); - } - }; - - template - class ConstExpression2DArrayCopy - { - public: - VULKAN_HPP_CONSTEXPR_14 static void copy(T /*dst*/[N][M], std::array, N> const& /*src*/) VULKAN_HPP_NOEXCEPT {} + OwnerType m_owner = OwnerType(); + PoolType m_pool = PoolType(); + Dispatch const * m_dispatch = nullptr; }; using Bool32 = uint32_t; @@ -2978,41 +3708,73 @@ namespace VULKAN_HPP_NAMESPACE using DeviceSize = uint64_t; using SampleMask = uint32_t; - enum class AccelerationStructureMemoryRequirementsTypeNV + template + struct CppType + {}; + + template + struct isVulkanHandleType { - eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, - eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV, - eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false; }; - VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value ) +#ifdef VK_ENABLE_BETA_EXTENSIONS + enum class AccelerationStructureBuildTypeKHR + { + eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR, + eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, + eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value ) { switch ( value ) { - case AccelerationStructureMemoryRequirementsTypeNV::eObject : return "Object"; - case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch : return "BuildScratch"; - case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch : return "UpdateScratch"; + case AccelerationStructureBuildTypeKHR::eHost : return "Host"; + case AccelerationStructureBuildTypeKHR::eDevice : return "Device"; + case AccelerationStructureBuildTypeKHR::eHostOrDevice : return "HostOrDevice"; + default: return "invalid"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + enum class AccelerationStructureMemoryRequirementsTypeKHR + { + eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR, + eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR, + eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR + }; + using AccelerationStructureMemoryRequirementsTypeNV = AccelerationStructureMemoryRequirementsTypeKHR; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeKHR value ) + { + switch ( value ) + { + case AccelerationStructureMemoryRequirementsTypeKHR::eObject : return "Object"; + case AccelerationStructureMemoryRequirementsTypeKHR::eBuildScratch : return "BuildScratch"; + case AccelerationStructureMemoryRequirementsTypeKHR::eUpdateScratch : return "UpdateScratch"; default: return "invalid"; } } - enum class AccelerationStructureTypeNV + enum class AccelerationStructureTypeKHR { - eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV, - eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV + eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR }; + using AccelerationStructureTypeNV = AccelerationStructureTypeKHR; - VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeNV value ) + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value ) { switch ( value ) { - case AccelerationStructureTypeNV::eTopLevel : return "TopLevel"; - case 
AccelerationStructureTypeNV::eBottomLevel : return "BottomLevel"; + case AccelerationStructureTypeKHR::eTopLevel : return "TopLevel"; + case AccelerationStructureTypeKHR::eBottomLevel : return "BottomLevel"; default: return "invalid"; } } - enum class AccessFlagBits + enum class AccessFlagBits : VkAccessFlags { eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, eIndexRead = VK_ACCESS_INDEX_READ_BIT, @@ -3035,13 +3797,15 @@ namespace VULKAN_HPP_NAMESPACE eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, - eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX, - eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX, eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, + eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, + eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, - eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, - eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT + eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV }; VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value ) @@ -3069,18 +3833,18 @@ namespace VULKAN_HPP_NAMESPACE case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT"; case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT"; case AccessFlagBits::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT"; - case AccessFlagBits::eCommandProcessReadNVX : return "CommandProcessReadNVX"; - case AccessFlagBits::eCommandProcessWriteNVX : return "CommandProcessWriteNVX"; case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT"; + case AccessFlagBits::eAccelerationStructureReadKHR : return "AccelerationStructureReadKHR"; + case AccessFlagBits::eAccelerationStructureWriteKHR : return "AccelerationStructureWriteKHR"; case AccessFlagBits::eShadingRateImageReadNV : return "ShadingRateImageReadNV"; - case AccessFlagBits::eAccelerationStructureReadNV : return "AccelerationStructureReadNV"; - case AccessFlagBits::eAccelerationStructureWriteNV : return "AccelerationStructureWriteNV"; case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT"; + case AccessFlagBits::eCommandPreprocessReadNV : return "CommandPreprocessReadNV"; + case AccessFlagBits::eCommandPreprocessWriteNV : return "CommandPreprocessWriteNV"; default: return "invalid"; } } - enum class AcquireProfilingLockFlagBitsKHR + enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR {}; VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR ) @@ -3088,7 +3852,7 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - enum class AttachmentDescriptionFlagBits + enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags { eMayAlias = 
VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT }; @@ -3123,7 +3887,8 @@ namespace VULKAN_HPP_NAMESPACE enum class AttachmentStoreOp { eStore = VK_ATTACHMENT_STORE_OP_STORE, - eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE + eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE, + eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM }; VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value ) @@ -3132,6 +3897,7 @@ namespace VULKAN_HPP_NAMESPACE { case AttachmentStoreOp::eStore : return "Store"; case AttachmentStoreOp::eDontCare : return "DontCare"; + case AttachmentStoreOp::eNoneQCOM : return "NoneQCOM"; default: return "invalid"; } } @@ -3325,7 +4091,9 @@ namespace VULKAN_HPP_NAMESPACE eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK, eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, - eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE + eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE, + eFloatCustomEXT = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT, + eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT }; VULKAN_HPP_INLINE std::string to_string( BorderColor value ) @@ -3338,11 +4106,13 @@ namespace VULKAN_HPP_NAMESPACE case BorderColor::eIntOpaqueBlack : return "IntOpaqueBlack"; case BorderColor::eFloatOpaqueWhite : return "FloatOpaqueWhite"; case BorderColor::eIntOpaqueWhite : return "IntOpaqueWhite"; + case BorderColor::eFloatCustomEXT : return "FloatCustomEXT"; + case BorderColor::eIntCustomEXT : return "IntCustomEXT"; default: return "invalid"; } } - enum class BufferCreateFlagBits + enum class BufferCreateFlagBits : VkBufferCreateFlags { eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, @@ -3366,7 +4136,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class BufferUsageFlagBits + enum class BufferUsageFlagBits : VkBufferUsageFlags { eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, @@ -3381,6 +4151,7 @@ namespace VULKAN_HPP_NAMESPACE eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, + eRayTracingKHR = VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR, eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR @@ -3403,37 +4174,30 @@ namespace VULKAN_HPP_NAMESPACE case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT"; case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT"; case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT"; - case BufferUsageFlagBits::eRayTracingNV : return "RayTracingNV"; + case BufferUsageFlagBits::eRayTracingKHR : return "RayTracingKHR"; default: return "invalid"; } } - enum class BufferViewCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits ) + enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR { - return "(void)"; - } - - enum class BuildAccelerationStructureFlagBitsNV - { - eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV, - eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV, - ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV, - 
ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV, - eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV + eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR }; + using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR; - VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsNV value ) + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value ) { switch ( value ) { - case BuildAccelerationStructureFlagBitsNV::eAllowUpdate : return "AllowUpdate"; - case BuildAccelerationStructureFlagBitsNV::eAllowCompaction : return "AllowCompaction"; - case BuildAccelerationStructureFlagBitsNV::ePreferFastTrace : return "PreferFastTrace"; - case BuildAccelerationStructureFlagBitsNV::ePreferFastBuild : return "PreferFastBuild"; - case BuildAccelerationStructureFlagBitsNV::eLowMemory : return "LowMemory"; + case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate : return "AllowUpdate"; + case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction : return "AllowCompaction"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace : return "PreferFastTrace"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild : return "PreferFastBuild"; + case BuildAccelerationStructureFlagBitsKHR::eLowMemory : return "LowMemory"; default: return "invalid"; } } @@ -3475,7 +4239,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ColorComponentFlagBits + enum class ColorComponentFlagBits : VkColorComponentFlags { eR = VK_COLOR_COMPONENT_R_BIT, eG = VK_COLOR_COMPONENT_G_BIT, @@ -3557,7 +4321,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class CommandBufferResetFlagBits + enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags { eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT }; @@ -3571,7 +4335,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class CommandBufferUsageFlagBits + enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags { eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, @@ -3589,7 +4353,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class CommandPoolCreateFlagBits + enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags { eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, @@ -3607,7 +4371,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class CommandPoolResetFlagBits + enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags { eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT }; @@ -3709,7 +4473,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class CompositeAlphaFlagBitsKHR + enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR { eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, @@ -3729,7 +4493,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ConditionalRenderingFlagBitsEXT + enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT { eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT }; @@ -3761,18 +4525,23 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class 
CopyAccelerationStructureModeNV + enum class CopyAccelerationStructureModeKHR { - eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV, - eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV + eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR, + eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR }; + using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR; - VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeNV value ) + VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value ) { switch ( value ) { - case CopyAccelerationStructureModeNV::eClone : return "Clone"; - case CopyAccelerationStructureModeNV::eCompact : return "Compact"; + case CopyAccelerationStructureModeKHR::eClone : return "Clone"; + case CopyAccelerationStructureModeKHR::eCompact : return "Compact"; + case CopyAccelerationStructureModeKHR::eSerialize : return "Serialize"; + case CopyAccelerationStructureModeKHR::eDeserialize : return "Deserialize"; default: return "invalid"; } } @@ -3813,7 +4582,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class CullModeFlagBits + enum class CullModeFlagBits : VkCullModeFlags { eNone = VK_CULL_MODE_NONE, eFront = VK_CULL_MODE_FRONT_BIT, @@ -3833,7 +4602,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class DebugReportFlagBitsEXT + enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT { eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT, eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT, @@ -3888,16 +4657,15 @@ namespace VULKAN_HPP_NAMESPACE eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, - eObjectTableNVX = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT, - eIndirectCommandsLayoutNVX = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, - eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, - eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT + eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT, + eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT }; VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value ) @@ -3935,17 +4703,15 @@ namespace VULKAN_HPP_NAMESPACE case DebugReportObjectTypeEXT::eDebugReportCallbackEXT : return "DebugReportCallbackEXT"; case DebugReportObjectTypeEXT::eDisplayKHR : return "DisplayKHR"; case DebugReportObjectTypeEXT::eDisplayModeKHR : return "DisplayModeKHR"; - case DebugReportObjectTypeEXT::eObjectTableNVX : return "ObjectTableNVX"; - case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNVX : return "IndirectCommandsLayoutNVX"; case DebugReportObjectTypeEXT::eValidationCacheEXT : return 
"ValidationCacheEXT"; case DebugReportObjectTypeEXT::eSamplerYcbcrConversion : return "SamplerYcbcrConversion"; case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate"; - case DebugReportObjectTypeEXT::eAccelerationStructureNV : return "AccelerationStructureNV"; + case DebugReportObjectTypeEXT::eAccelerationStructureKHR : return "AccelerationStructureKHR"; default: return "invalid"; } } - enum class DebugUtilsMessageSeverityFlagBitsEXT + enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT { eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, @@ -3965,7 +4731,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class DebugUtilsMessageTypeFlagBitsEXT + enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT { eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, @@ -3983,13 +4749,13 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class DependencyFlagBits + enum class DependencyFlagBits : VkDependencyFlags { eByRegion = VK_DEPENDENCY_BY_REGION_BIT, eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, - eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR, - eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR + eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR, + eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR }; VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value ) @@ -4003,7 +4769,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class DescriptorBindingFlagBits + enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags { eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, @@ -4024,7 +4790,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class DescriptorPoolCreateFlagBits + enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags { eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, @@ -4041,7 +4807,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class DescriptorSetLayoutCreateFlagBits + enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags { eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, @@ -4072,6 +4838,7 @@ namespace VULKAN_HPP_NAMESPACE eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, + eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV }; @@ -4091,7 +4858,7 @@ namespace VULKAN_HPP_NAMESPACE case DescriptorType::eStorageBufferDynamic : return "StorageBufferDynamic"; case DescriptorType::eInputAttachment : return "InputAttachment"; case DescriptorType::eInlineUniformBlockEXT : return "InlineUniformBlockEXT"; - case DescriptorType::eAccelerationStructureNV : return "AccelerationStructureNV"; + case DescriptorType::eAccelerationStructureKHR : return "AccelerationStructureKHR"; default: return "invalid"; } } @@ -4121,6 +4888,24 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } + enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV + { + 
eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV, + eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV, + eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value ) + { + switch ( value ) + { + case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo : return "EnableShaderDebugInfo"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking : return "EnableResourceTracking"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints : return "EnableAutomaticCheckpoints"; + default: return "invalid"; + } + } + enum class DeviceEventTypeEXT { eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT @@ -4135,7 +4920,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class DeviceGroupPresentModeFlagBitsKHR + enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR { eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, @@ -4155,7 +4940,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class DeviceQueueCreateFlagBits + enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags { eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT }; @@ -4199,7 +4984,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class DisplayPlaneAlphaFlagBitsKHR + enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR { eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR, @@ -4251,6 +5036,8 @@ namespace VULKAN_HPP_NAMESPACE eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY, eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE, + eMoltenvk = VK_DRIVER_ID_MOLTENVK, eIntelOpenSourceMesa = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR }; using DriverIdKHR = DriverId; @@ -4271,6 +5058,8 @@ namespace VULKAN_HPP_NAMESPACE case DriverId::eGoogleSwiftshader : return "GoogleSwiftshader"; case DriverId::eGgpProprietary : return "GgpProprietary"; case DriverId::eBroadcomProprietary : return "BroadcomProprietary"; + case DriverId::eMesaLlvmpipe : return "MesaLlvmpipe"; + case DriverId::eMoltenvk : return "Moltenvk"; default: return "invalid"; } } @@ -4292,7 +5081,19 @@ namespace VULKAN_HPP_NAMESPACE eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, - eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT + eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, + eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT, + eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT, + ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT, + eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT, + eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT, + eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT, + eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT, + eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT, + eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT, + eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT, + eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT, + eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT }; 
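Illustrative sketch (not part of the generated header or of this patch): the hunks above replace the NV-suffixed ray-tracing enums with KHR equivalents plus `using` aliases, give the flag-bit enums explicit Vk*Flags underlying types, and extend DynamicState; the new values are covered by the to_string overload in the hunk that follows. Assuming the default VULKAN_HPP_NAMESPACE of `vk`, downstream code along these lines keeps compiling unchanged:

    // Sketch only; every name below comes from the hunks above/below, nothing new is assumed.
    #include <vulkan/vulkan.hpp>
    #include <cassert>
    #include <string>

    void sketch()
    {
      // NV-suffixed names keep compiling: they are now aliases of the KHR enums.
      vk::AccelerationStructureTypeNV     type = vk::AccelerationStructureTypeKHR::eTopLevel;
      vk::CopyAccelerationStructureModeNV mode = vk::CopyAccelerationStructureModeKHR::eClone;

      // Flag bits now declare their underlying Vk*Flags type, so a raw C flags value
      // converts back with a plain static_cast.
      VkAccessFlags      raw = VK_ACCESS_SHADER_READ_BIT;
      vk::AccessFlagBits bit = static_cast<vk::AccessFlagBits>( raw );

      // The extended-dynamic-state values added above are handled by the
      // to_string overload in the next hunk.
      assert( vk::to_string( vk::DynamicState::eCullModeEXT ) == "CullModeEXT" );
      (void)type; (void)mode; (void)bit;
    }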
VULKAN_HPP_INLINE std::string to_string( DynamicState value ) @@ -4315,11 +5116,23 @@ namespace VULKAN_HPP_NAMESPACE case DynamicState::eViewportCoarseSampleOrderNV : return "ViewportCoarseSampleOrderNV"; case DynamicState::eExclusiveScissorNV : return "ExclusiveScissorNV"; case DynamicState::eLineStippleEXT : return "LineStippleEXT"; + case DynamicState::eCullModeEXT : return "CullModeEXT"; + case DynamicState::eFrontFaceEXT : return "FrontFaceEXT"; + case DynamicState::ePrimitiveTopologyEXT : return "PrimitiveTopologyEXT"; + case DynamicState::eViewportWithCountEXT : return "ViewportWithCountEXT"; + case DynamicState::eScissorWithCountEXT : return "ScissorWithCountEXT"; + case DynamicState::eVertexInputBindingStrideEXT : return "VertexInputBindingStrideEXT"; + case DynamicState::eDepthTestEnableEXT : return "DepthTestEnableEXT"; + case DynamicState::eDepthWriteEnableEXT : return "DepthWriteEnableEXT"; + case DynamicState::eDepthCompareOpEXT : return "DepthCompareOpEXT"; + case DynamicState::eDepthBoundsTestEnableEXT : return "DepthBoundsTestEnableEXT"; + case DynamicState::eStencilTestEnableEXT : return "StencilTestEnableEXT"; + case DynamicState::eStencilOpEXT : return "StencilOpEXT"; default: return "invalid"; } } - enum class ExternalFenceFeatureFlagBits + enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags { eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT @@ -4336,7 +5149,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ExternalFenceHandleTypeFlagBits + enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags { eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, @@ -4357,7 +5170,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ExternalMemoryFeatureFlagBits + enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags { eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, @@ -4376,7 +5189,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ExternalMemoryFeatureFlagBitsNV + enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV { eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV, eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV, @@ -4394,7 +5207,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ExternalMemoryHandleTypeFlagBits + enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags { eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, @@ -4429,7 +5242,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ExternalMemoryHandleTypeFlagBitsNV + enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV { eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV, eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV, @@ -4449,7 +5262,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ExternalSemaphoreFeatureFlagBits + enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags { eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT @@ -4466,13 +5279,14 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ExternalSemaphoreHandleTypeFlagBits + enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags { eOpaqueFd = 
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, - eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT + eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, + eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT }; using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits; @@ -4489,7 +5303,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class FenceCreateFlagBits + enum class FenceCreateFlagBits : VkFenceCreateFlags { eSignaled = VK_FENCE_CREATE_SIGNALED_BIT }; @@ -4503,7 +5317,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class FenceImportFlagBits + enum class FenceImportFlagBits : VkFenceImportFlags { eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT }; @@ -4780,40 +5594,42 @@ namespace VULKAN_HPP_NAMESPACE eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, - eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR, - eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR, - eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, - eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, - eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, - eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, - eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, - eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR, - eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, - eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, - eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, + eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT, + eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT, eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, - eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, - eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, - eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, - eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, - eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, - eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR, - eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, - eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, - eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, - eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, + eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR, + eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR, + eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, + eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, + 
eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, + eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, + eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, + eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, + eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR, - eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR, + eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, + eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR, eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR, - eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR, - eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, - eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, - eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR + eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, + eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, + eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR, + eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR, + eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, + eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, + eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, + eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, + eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, + eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, + eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, + eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR, + eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, + eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, + eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR }; VULKAN_HPP_INLINE std::string to_string( Format value ) @@ -5061,11 +5877,13 @@ namespace VULKAN_HPP_NAMESPACE case Format::eAstc10x10SfloatBlockEXT : return "Astc10x10SfloatBlockEXT"; case Format::eAstc12x10SfloatBlockEXT : return "Astc12x10SfloatBlockEXT"; case Format::eAstc12x12SfloatBlockEXT : return "Astc12x12SfloatBlockEXT"; + case Format::eA4R4G4B4UnormPack16EXT : return "A4R4G4B4UnormPack16EXT"; + case Format::eA4B4G4R4UnormPack16EXT : return "A4B4G4R4UnormPack16EXT"; default: return "invalid"; } } - enum class FormatFeatureFlagBits + enum class FormatFeatureFlagBits : VkFormatFeatureFlags { eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, @@ -5091,18 +5909,19 @@ namespace VULKAN_HPP_NAMESPACE eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, 
eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, + eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT, - eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR, - eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, - eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, + eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, + eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR, eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR, - eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR, - eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR, + eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, + eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR, eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR, - eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR, - eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, - eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT + eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR, + eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR, + eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, + eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR }; VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value ) @@ -5133,12 +5952,13 @@ namespace VULKAN_HPP_NAMESPACE case FormatFeatureFlagBits::eCositedChromaSamples : return "CositedChromaSamples"; case FormatFeatureFlagBits::eSampledImageFilterMinmax : return "SampledImageFilterMinmax"; case FormatFeatureFlagBits::eSampledImageFilterCubicIMG : return "SampledImageFilterCubicIMG"; + case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR : return "AccelerationStructureVertexBufferKHR"; case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT"; default: return "invalid"; } } - enum class FramebufferCreateFlagBits + enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags { eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR @@ -5191,59 +6011,65 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - enum class GeometryFlagBitsNV + enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR { - eOpaque = VK_GEOMETRY_OPAQUE_BIT_NV, - eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV + eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR, + eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR }; + using GeometryFlagBitsNV = GeometryFlagBitsKHR; - VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsNV value ) + VULKAN_HPP_INLINE 
std::string to_string( GeometryFlagBitsKHR value ) { switch ( value ) { - case GeometryFlagBitsNV::eOpaque : return "Opaque"; - case GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation"; + case GeometryFlagBitsKHR::eOpaque : return "Opaque"; + case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation"; default: return "invalid"; } } - enum class GeometryInstanceFlagBitsNV + enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR { - eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV, - eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV, - eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV, - eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV + eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV }; + using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR; - VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsNV value ) + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value ) { switch ( value ) { - case GeometryInstanceFlagBitsNV::eTriangleCullDisable : return "TriangleCullDisable"; - case GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise"; - case GeometryInstanceFlagBitsNV::eForceOpaque : return "ForceOpaque"; - case GeometryInstanceFlagBitsNV::eForceNoOpaque : return "ForceNoOpaque"; + case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable : return "TriangleFacingCullDisable"; + case GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise"; + case GeometryInstanceFlagBitsKHR::eForceOpaque : return "ForceOpaque"; + case GeometryInstanceFlagBitsKHR::eForceNoOpaque : return "ForceNoOpaque"; default: return "invalid"; } } - enum class GeometryTypeNV + enum class GeometryTypeKHR { - eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_NV, - eAabbs = VK_GEOMETRY_TYPE_AABBS_NV + eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR, + eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR }; + using GeometryTypeNV = GeometryTypeKHR; - VULKAN_HPP_INLINE std::string to_string( GeometryTypeNV value ) + VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value ) { switch ( value ) { - case GeometryTypeNV::eTriangles : return "Triangles"; - case GeometryTypeNV::eAabbs : return "Aabbs"; + case GeometryTypeKHR::eTriangles : return "Triangles"; + case GeometryTypeKHR::eAabbs : return "Aabbs"; + case GeometryTypeKHR::eInstances : return "Instances"; default: return "invalid"; } } - enum class ImageAspectFlagBits + enum class ImageAspectFlagBits : VkImageAspectFlags { eColor = VK_IMAGE_ASPECT_COLOR_BIT, eDepth = VK_IMAGE_ASPECT_DEPTH_BIT, @@ -5280,7 +6106,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ImageCreateFlagBits + enum class ImageCreateFlagBits : VkImageCreateFlags { eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT, eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, @@ -5297,12 +6123,12 @@ namespace VULKAN_HPP_NAMESPACE eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, eSampleLocationsCompatibleDepthEXT = 
VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT, eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, - eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR, + eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR, eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, - eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR, - eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR + eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, + eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR }; VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value ) @@ -5349,10 +6175,10 @@ namespace VULKAN_HPP_NAMESPACE eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, - eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, - eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR, eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, + eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR, eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, + eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR }; @@ -5420,7 +6246,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ImageUsageFlagBits + enum class ImageUsageFlagBits : VkImageUsageFlags { eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, @@ -5452,9 +6278,10 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ImageViewCreateFlagBits + enum class ImageViewCreateFlagBits : VkImageViewCreateFlags { - eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT + eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT, + eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT }; VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value ) @@ -5462,6 +6289,7 @@ namespace VULKAN_HPP_NAMESPACE switch ( value ) { case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT : return "FragmentDensityMapDynamicEXT"; + case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT : return "FragmentDensityMapDeferredEXT"; default: return "invalid"; } } @@ -5496,8 +6324,9 @@ namespace VULKAN_HPP_NAMESPACE { eUint16 = VK_INDEX_TYPE_UINT16, eUint32 = VK_INDEX_TYPE_UINT32, - eNoneNV = VK_INDEX_TYPE_NONE_NV, - eUint8EXT = VK_INDEX_TYPE_UINT8_EXT + eNoneKHR = VK_INDEX_TYPE_NONE_KHR, + eUint8EXT = VK_INDEX_TYPE_UINT8_EXT, + eNoneNV = VK_INDEX_TYPE_NONE_NV }; VULKAN_HPP_INLINE std::string to_string( IndexType value ) @@ -5506,56 +6335,68 @@ namespace VULKAN_HPP_NAMESPACE { case IndexType::eUint16 : return "Uint16"; case IndexType::eUint32 : return "Uint32"; - case IndexType::eNoneNV : return "NoneNV"; + case IndexType::eNoneKHR : return "NoneKHR"; case IndexType::eUint8EXT : return "Uint8EXT"; default: return "invalid"; } } - enum class 
IndirectCommandsLayoutUsageFlagBitsNVX + enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV { - eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX, - eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX, - eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX, - eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX + eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV, + eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV, + eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV }; - VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNVX value ) + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value ) { switch ( value ) { - case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences : return "UnorderedSequences"; - case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences : return "SparseSequences"; - case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions : return "EmptyExecutions"; - case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences : return "IndexedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess : return "ExplicitPreprocess"; + case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences : return "IndexedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences : return "UnorderedSequences"; default: return "invalid"; } } - enum class IndirectCommandsTokenTypeNVX + enum class IndirectCommandsTokenTypeNV { - ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, - eDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX, - eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX, - eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX, - ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX, - eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX, - eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX, - eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX + eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV, + eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, + eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV }; - VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNVX value ) + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value ) { switch ( value ) { - case IndirectCommandsTokenTypeNVX::ePipeline : return "Pipeline"; - case IndirectCommandsTokenTypeNVX::eDescriptorSet : return "DescriptorSet"; - case IndirectCommandsTokenTypeNVX::eIndexBuffer : return "IndexBuffer"; - case IndirectCommandsTokenTypeNVX::eVertexBuffer : return "VertexBuffer"; - case IndirectCommandsTokenTypeNVX::ePushConstant : return "PushConstant"; - case IndirectCommandsTokenTypeNVX::eDrawIndexed : return "DrawIndexed"; - case IndirectCommandsTokenTypeNVX::eDraw : return "Draw"; - case IndirectCommandsTokenTypeNVX::eDispatch : return "Dispatch"; + case IndirectCommandsTokenTypeNV::eShaderGroup : return "ShaderGroup"; + 
case IndirectCommandsTokenTypeNV::eStateFlags : return "StateFlags"; + case IndirectCommandsTokenTypeNV::eIndexBuffer : return "IndexBuffer"; + case IndirectCommandsTokenTypeNV::eVertexBuffer : return "VertexBuffer"; + case IndirectCommandsTokenTypeNV::ePushConstant : return "PushConstant"; + case IndirectCommandsTokenTypeNV::eDrawIndexed : return "DrawIndexed"; + case IndirectCommandsTokenTypeNV::eDraw : return "Draw"; + case IndirectCommandsTokenTypeNV::eDrawTasks : return "DrawTasks"; + default: return "invalid"; + } + } + + enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV + { + eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value ) + { + switch ( value ) + { + case IndirectStateFlagBitsNV::eFlagFrontface : return "FlagFrontface"; default: return "invalid"; } } @@ -5646,7 +6487,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class MemoryAllocateFlagBits + enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags { eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, @@ -5665,7 +6506,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class MemoryHeapFlagBits + enum class MemoryHeapFlagBits : VkMemoryHeapFlags { eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT, eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, @@ -5700,7 +6541,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class MemoryPropertyFlagBits + enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags { eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, @@ -5728,44 +6569,6 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ObjectEntryTypeNVX - { - eDescriptorSet = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, - ePipeline = VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX, - eIndexBuffer = VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX, - eVertexBuffer = VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX, - ePushConstant = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - }; - - VULKAN_HPP_INLINE std::string to_string( ObjectEntryTypeNVX value ) - { - switch ( value ) - { - case ObjectEntryTypeNVX::eDescriptorSet : return "DescriptorSet"; - case ObjectEntryTypeNVX::ePipeline : return "Pipeline"; - case ObjectEntryTypeNVX::eIndexBuffer : return "IndexBuffer"; - case ObjectEntryTypeNVX::eVertexBuffer : return "VertexBuffer"; - case ObjectEntryTypeNVX::ePushConstant : return "PushConstant"; - default: return "invalid"; - } - } - - enum class ObjectEntryUsageFlagBitsNVX - { - eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX, - eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX - }; - - VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagBitsNVX value ) - { - switch ( value ) - { - case ObjectEntryUsageFlagBitsNVX::eGraphics : return "Graphics"; - case ObjectEntryUsageFlagBitsNVX::eCompute : return "Compute"; - default: return "invalid"; - } - } - enum class ObjectType { eUnknown = VK_OBJECT_TYPE_UNKNOWN, @@ -5801,12 +6604,14 @@ namespace VULKAN_HPP_NAMESPACE eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, - eObjectTableNVX = VK_OBJECT_TYPE_OBJECT_TABLE_NVX, - eIndirectCommandsLayoutNVX = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX, eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, + eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, - eAccelerationStructureNV = 
VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, + eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR, + eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV, + ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT, + eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR }; @@ -5848,17 +6653,22 @@ namespace VULKAN_HPP_NAMESPACE case ObjectType::eDisplayKHR : return "DisplayKHR"; case ObjectType::eDisplayModeKHR : return "DisplayModeKHR"; case ObjectType::eDebugReportCallbackEXT : return "DebugReportCallbackEXT"; - case ObjectType::eObjectTableNVX : return "ObjectTableNVX"; - case ObjectType::eIndirectCommandsLayoutNVX : return "IndirectCommandsLayoutNVX"; case ObjectType::eDebugUtilsMessengerEXT : return "DebugUtilsMessengerEXT"; + case ObjectType::eAccelerationStructureKHR : return "AccelerationStructureKHR"; case ObjectType::eValidationCacheEXT : return "ValidationCacheEXT"; - case ObjectType::eAccelerationStructureNV : return "AccelerationStructureNV"; case ObjectType::ePerformanceConfigurationINTEL : return "PerformanceConfigurationINTEL"; + case ObjectType::eDeferredOperationKHR : return "DeferredOperationKHR"; + case ObjectType::eIndirectCommandsLayoutNV : return "IndirectCommandsLayoutNV"; + case ObjectType::ePrivateDataSlotEXT : return "PrivateDataSlotEXT"; default: return "invalid"; } } - enum class PeerMemoryFeatureFlagBits + template + struct cpp_type + {}; + + enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags { eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, @@ -5893,7 +6703,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class PerformanceCounterDescriptionFlagBitsKHR + enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR { ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR, eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR @@ -5915,8 +6725,8 @@ namespace VULKAN_HPP_NAMESPACE eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, - eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR, - eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR + eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR, + eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR }; VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value ) @@ -6068,6 +6878,7 @@ namespace VULKAN_HPP_NAMESPACE { eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, + eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV }; @@ -6077,17 +6888,23 @@ namespace VULKAN_HPP_NAMESPACE { case PipelineBindPoint::eGraphics : return "Graphics"; case PipelineBindPoint::eCompute : return "Compute"; - case PipelineBindPoint::eRayTracingNV : return "RayTracingNV"; + case PipelineBindPoint::eRayTracingKHR : return "RayTracingKHR"; default: return "invalid"; } } - enum class PipelineCacheCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits ) + enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags { - return "(void)"; + 
eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value ) + { + switch ( value ) + { + case PipelineCacheCreateFlagBits::eExternallySynchronizedEXT : return "ExternallySynchronizedEXT"; + default: return "invalid"; + } } enum class PipelineCacheHeaderVersion @@ -6104,15 +6921,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class PipelineColorBlendStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits ) - { - return "(void)"; - } - - enum class PipelineCompilerControlFlagBitsAMD + enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD {}; VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD ) @@ -6120,18 +6929,28 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - enum class PipelineCreateFlagBits + enum class PipelineCreateFlagBits : VkPipelineCreateFlags { eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT, eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, + eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, + eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, + eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, - eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, - eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR + eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, + eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, + eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, + eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, + eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR, + eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR }; VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value ) @@ -6143,14 +6962,24 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits::eDerivative : return "Derivative"; case PipelineCreateFlagBits::eViewIndexFromDeviceIndex : return "ViewIndexFromDeviceIndex"; case PipelineCreateFlagBits::eDispatchBase : return "DispatchBase"; + case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR : return "RayTracingNoNullAnyHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR : return "RayTracingNoNullClosestHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR : return "RayTracingNoNullMissShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR : return "RayTracingNoNullIntersectionShadersKHR"; + case 
PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR : return "RayTracingSkipTrianglesKHR"; + case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR : return "RayTracingSkipAabbsKHR"; case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV"; case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR"; case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR"; + case PipelineCreateFlagBits::eIndirectBindableNV : return "IndirectBindableNV"; + case PipelineCreateFlagBits::eLibraryKHR : return "LibraryKHR"; + case PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT : return "FailOnPipelineCompileRequiredEXT"; + case PipelineCreateFlagBits::eEarlyReturnOnFailureEXT : return "EarlyReturnOnFailureEXT"; default: return "invalid"; } } - enum class PipelineCreationFeedbackFlagBitsEXT + enum class PipelineCreationFeedbackFlagBitsEXT : VkPipelineCreationFeedbackFlagsEXT { eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT, eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT, @@ -6168,22 +6997,6 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class PipelineDepthStencilStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits ) - { - return "(void)"; - } - - enum class PipelineDynamicStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits ) - { - return "(void)"; - } - enum class PipelineExecutableStatisticFormatKHR { eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, @@ -6204,39 +7017,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class PipelineInputAssemblyStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits ) - { - return "(void)"; - } - - enum class PipelineLayoutCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits ) - { - return "(void)"; - } - - enum class PipelineMultisampleStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits ) - { - return "(void)"; - } - - enum class PipelineRasterizationStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits ) - { - return "(void)"; - } - - enum class PipelineShaderStageCreateFlagBits + enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags { eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT @@ -6252,7 +7033,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class PipelineStageFlagBits + enum class PipelineStageFlagBits : VkPipelineStageFlags { eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, @@ -6273,13 +7054,15 @@ namespace VULKAN_HPP_NAMESPACE eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT, eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, - eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX, + eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, - eRayTracingShaderNV = 
VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, - eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, - eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT + eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT, + eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, + eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, + eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV }; VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value ) @@ -6305,41 +7088,17 @@ namespace VULKAN_HPP_NAMESPACE case PipelineStageFlagBits::eAllCommands : return "AllCommands"; case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT"; case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT"; - case PipelineStageFlagBits::eCommandProcessNVX : return "CommandProcessNVX"; + case PipelineStageFlagBits::eRayTracingShaderKHR : return "RayTracingShaderKHR"; + case PipelineStageFlagBits::eAccelerationStructureBuildKHR : return "AccelerationStructureBuildKHR"; case PipelineStageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV"; - case PipelineStageFlagBits::eRayTracingShaderNV : return "RayTracingShaderNV"; - case PipelineStageFlagBits::eAccelerationStructureBuildNV : return "AccelerationStructureBuildNV"; case PipelineStageFlagBits::eTaskShaderNV : return "TaskShaderNV"; case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV"; case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT"; + case PipelineStageFlagBits::eCommandPreprocessNV : return "CommandPreprocessNV"; default: return "invalid"; } } - enum class PipelineTessellationStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits ) - { - return "(void)"; - } - - enum class PipelineVertexInputStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits ) - { - return "(void)"; - } - - enum class PipelineViewportStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits ) - { - return "(void)"; - } - enum class PointClippingBehavior { eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, @@ -6435,7 +7194,15 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class QueryControlFlagBits + enum class PrivateDataSlotCreateFlagBitsEXT : VkPrivateDataSlotCreateFlagsEXT + {}; + + VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBitsEXT ) + { + return "(void)"; + } + + enum class QueryControlFlagBits : VkQueryControlFlags { ePrecise = VK_QUERY_CONTROL_PRECISE_BIT }; @@ -6449,7 +7216,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class QueryPipelineStatisticFlagBits + enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags { eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT, eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT, @@ -6505,7 +7272,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class QueryResultFlagBits + enum class QueryResultFlagBits : VkQueryResultFlags { e64 = VK_QUERY_RESULT_64_BIT, eWait = VK_QUERY_RESULT_WAIT_BIT, @@ -6532,8 +7299,10 @@ namespace VULKAN_HPP_NAMESPACE eTimestamp = VK_QUERY_TYPE_TIMESTAMP, eTransformFeedbackStreamEXT = 
VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, - eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV, - ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL + eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, + eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, + ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL, + eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV }; VULKAN_HPP_INLINE std::string to_string( QueryType value ) @@ -6545,13 +7314,14 @@ namespace VULKAN_HPP_NAMESPACE case QueryType::eTimestamp : return "Timestamp"; case QueryType::eTransformFeedbackStreamEXT : return "TransformFeedbackStreamEXT"; case QueryType::ePerformanceQueryKHR : return "PerformanceQueryKHR"; - case QueryType::eAccelerationStructureCompactedSizeNV : return "AccelerationStructureCompactedSizeNV"; + case QueryType::eAccelerationStructureCompactedSizeKHR : return "AccelerationStructureCompactedSizeKHR"; + case QueryType::eAccelerationStructureSerializationSizeKHR : return "AccelerationStructureSerializationSizeKHR"; case QueryType::ePerformanceQueryINTEL : return "PerformanceQueryINTEL"; default: return "invalid"; } } - enum class QueueFlagBits + enum class QueueFlagBits : VkQueueFlags { eGraphics = VK_QUEUE_GRAPHICS_BIT, eCompute = VK_QUEUE_COMPUTE_BIT, @@ -6609,33 +7379,40 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class RayTracingShaderGroupTypeNV + enum class RayTracingShaderGroupTypeKHR { - eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV, - eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV, - eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV + eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR }; + using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR; - VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeNV value ) + VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value ) { switch ( value ) { - case RayTracingShaderGroupTypeNV::eGeneral : return "General"; - case RayTracingShaderGroupTypeNV::eTrianglesHitGroup : return "TrianglesHitGroup"; - case RayTracingShaderGroupTypeNV::eProceduralHitGroup : return "ProceduralHitGroup"; + case RayTracingShaderGroupTypeKHR::eGeneral : return "General"; + case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup : return "TrianglesHitGroup"; + case RayTracingShaderGroupTypeKHR::eProceduralHitGroup : return "ProceduralHitGroup"; default: return "invalid"; } } - enum class RenderPassCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits ) + enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags { - return "(void)"; + eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM + }; + + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value ) + { + switch ( value ) + { + case RenderPassCreateFlagBits::eTransformQCOM : return "TransformQCOM"; + default: return "invalid"; + } } - enum class ResolveModeFlagBits + enum class ResolveModeFlagBits : VkResolveModeFlags { eNone = VK_RESOLVE_MODE_NONE, eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, @@ -6690,14 +7467,21 @@ namespace 
VULKAN_HPP_NAMESPACE eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, + eErrorIncompatibleVersionKHR = VK_ERROR_INCOMPATIBLE_VERSION_KHR, eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, - eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, - eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, + eThreadIdleKHR = VK_THREAD_IDLE_KHR, + eThreadDoneKHR = VK_THREAD_DONE_KHR, + eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR, + eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR, + ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT, eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, - eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR + eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, + eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR, + eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, + eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT }; VULKAN_HPP_INLINE std::string to_string( Result value ) @@ -6734,14 +7518,20 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorIncompatibleDisplayKHR : return "ErrorIncompatibleDisplayKHR"; case Result::eErrorValidationFailedEXT : return "ErrorValidationFailedEXT"; case Result::eErrorInvalidShaderNV : return "ErrorInvalidShaderNV"; + case Result::eErrorIncompatibleVersionKHR : return "ErrorIncompatibleVersionKHR"; case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT : return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; case Result::eErrorNotPermittedEXT : return "ErrorNotPermittedEXT"; case Result::eErrorFullScreenExclusiveModeLostEXT : return "ErrorFullScreenExclusiveModeLostEXT"; + case Result::eThreadIdleKHR : return "ThreadIdleKHR"; + case Result::eThreadDoneKHR : return "ThreadDoneKHR"; + case Result::eOperationDeferredKHR : return "OperationDeferredKHR"; + case Result::eOperationNotDeferredKHR : return "OperationNotDeferredKHR"; + case Result::ePipelineCompileRequiredEXT : return "PipelineCompileRequiredEXT"; default: return "invalid"; } } - enum class SampleCountFlagBits + enum class SampleCountFlagBits : VkSampleCountFlags { e1 = VK_SAMPLE_COUNT_1_BIT, e2 = VK_SAMPLE_COUNT_2_BIT, @@ -6790,7 +7580,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class SamplerCreateFlagBits + enum class SamplerCreateFlagBits : VkSamplerCreateFlags { eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT @@ -6901,7 +7691,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class SemaphoreCreateFlagBits + enum class SemaphoreCreateFlagBits : VkSemaphoreCreateFlags {}; VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits ) @@ -6909,7 +7699,7 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - enum class SemaphoreImportFlagBits + enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags { eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT }; @@ -6941,7 +7731,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class SemaphoreWaitFlagBits + enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags { eAny = 
VK_SEMAPHORE_WAIT_ANY_BIT }; @@ -6956,7 +7746,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ShaderCorePropertiesFlagBitsAMD + enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD {}; VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD ) @@ -7001,7 +7791,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ShaderModuleCreateFlagBits + enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags {}; VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits ) @@ -7009,7 +7799,7 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - enum class ShaderStageFlagBits + enum class ShaderStageFlagBits : VkShaderStageFlags { eVertex = VK_SHADER_STAGE_VERTEX_BIT, eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, @@ -7019,14 +7809,20 @@ namespace VULKAN_HPP_NAMESPACE eCompute = VK_SHADER_STAGE_COMPUTE_BIT, eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, eAll = VK_SHADER_STAGE_ALL, - eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV, - eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, - eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, - eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, - eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, - eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV, + eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR, + eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR, eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, - eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV + eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV, + eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, + eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV, + eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, + eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, + eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, + eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV }; VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value ) @@ -7041,12 +7837,12 @@ namespace VULKAN_HPP_NAMESPACE case ShaderStageFlagBits::eCompute : return "Compute"; case ShaderStageFlagBits::eAllGraphics : return "AllGraphics"; case ShaderStageFlagBits::eAll : return "All"; - case ShaderStageFlagBits::eRaygenNV : return "RaygenNV"; - case ShaderStageFlagBits::eAnyHitNV : return "AnyHitNV"; - case ShaderStageFlagBits::eClosestHitNV : return "ClosestHitNV"; - case ShaderStageFlagBits::eMissNV : return "MissNV"; - case ShaderStageFlagBits::eIntersectionNV : return "IntersectionNV"; - case ShaderStageFlagBits::eCallableNV : return "CallableNV"; + case ShaderStageFlagBits::eRaygenKHR : return "RaygenKHR"; + case ShaderStageFlagBits::eAnyHitKHR : return "AnyHitKHR"; + case ShaderStageFlagBits::eClosestHitKHR : return "ClosestHitKHR"; + case ShaderStageFlagBits::eMissKHR : return "MissKHR"; + case ShaderStageFlagBits::eIntersectionKHR : return "IntersectionKHR"; + case ShaderStageFlagBits::eCallableKHR : return "CallableKHR"; case ShaderStageFlagBits::eTaskNV : return "TaskNV"; case ShaderStageFlagBits::eMeshNV : return "MeshNV"; default: return "invalid"; @@ -7105,7 +7901,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class SparseImageFormatFlagBits + enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags { eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT, eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT, @@ -7123,7 +7919,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class SparseMemoryBindFlagBits + enum class 
SparseMemoryBindFlagBits : VkSparseMemoryBindFlags { eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT }; @@ -7137,7 +7933,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class StencilFaceFlagBits + enum class StencilFaceFlagBits : VkStencilFaceFlags { eFront = VK_STENCIL_FACE_FRONT_BIT, eBack = VK_STENCIL_FACE_BACK_BIT, @@ -7378,6 +8174,7 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT, ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT, eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, + eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX, eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV, @@ -7410,12 +8207,6 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, - eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX, - eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX, - eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX, - eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX, - eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX, - eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX, ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV, eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, @@ -7479,6 +8270,26 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT, ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV, + eBindAccelerationStructureMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR, + eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR, + eAccelerationStructureCreateGeometryTypeInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR, + eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR, + eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR, + eAccelerationStructureGeometryInstancesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR, + eAccelerationStructureGeometryTrianglesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR, + 
eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR, + eAccelerationStructureMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR, + eAccelerationStructureVersionKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR, + eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR, + eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR, + eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR, + ePhysicalDeviceRayTracingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR, + ePhysicalDeviceRayTracingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR, + eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR, + eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR, + eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR, + eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR, ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV, ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV, ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV, @@ -7490,6 +8301,8 @@ namespace VULKAN_HPP_NAMESPACE eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT, + ePhysicalDevicePortabilitySubsetFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR, + ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR, ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV, ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV, @@ -7499,8 +8312,6 @@ namespace VULKAN_HPP_NAMESPACE eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, - eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, - eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV, eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, @@ -7533,7 +8344,7 @@ namespace VULKAN_HPP_NAMESPACE eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, - eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, + eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL, ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL, ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL, @@ -7576,7 +8387,10 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, + ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT, ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + ePhysicalDeviceExtendedDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT, + eDeferredOperationInfoKHR = VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR, ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, @@ -7584,120 +8398,161 @@ namespace VULKAN_HPP_NAMESPACE ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, + ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV, + eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV, + eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV, + eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV, + eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV, + eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV, + eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV, ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, - ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, - ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, - eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, - eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, - ePhysicalDeviceMultiviewFeaturesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, - ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, - ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, - ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, - eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, - eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, - ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, - eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, - ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, - eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR, - ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, - eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, - eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, - eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, - eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, - eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, - eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, - eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, - ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, - eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, - ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, - eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, - ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, - eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, - ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, - eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, - eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, - eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, - ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, - eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, - eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, - ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, - ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, - ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, - eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, - ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, - eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR, - eFramebufferAttachmentImageInfoKHR = 
VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, - eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, + eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM, + eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM, + ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT, + ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT, + eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT, + ePhysicalDeviceCustomBorderColorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT, + ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT, + ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR, + ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT, + eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT, + ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT, + ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT, + ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV, + eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, + ePhysicalDeviceFragmentDensityMap2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT, + ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT, + eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR, + eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR, + eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR, + eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR, + eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR, + eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR, + eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR, + eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR, + eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR, + eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR, + eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR, + ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT, + eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT, eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, + eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR, eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, - eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, - eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, - eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, - eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, - eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, - 
ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, - eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, - eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, - ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR, - eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, - eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, - ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, - ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, - ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, - eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, - eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, - ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, - eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, - eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, - eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, - eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR, - eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, - eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR, - eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, - eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR, - eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR, - eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, - eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, - ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, - eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR, + eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR, + eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, + eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, + eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, + eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, + eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, + eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, + eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, + eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR, + eDebugReportCreateInfoEXT = 
VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, - ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, - ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, + eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, - ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, - eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, - ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, + eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, + eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, + eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, + eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, + eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, + eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, + eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR, + eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, + eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, + eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, + eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, + eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, + eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, + eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, + eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, + eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, + eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR, + eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, + eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, + eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, + eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, + eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, + eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, + eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, + 
eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, + eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, + eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR, + eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, + ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, - ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, - ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, - ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, + ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT, + ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR, ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR, - eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR, - ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, - ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR, - eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR, - eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR, - eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR, - eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR, - ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, + ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, + ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, + ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, + ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, + ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, + ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, + ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, + ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, + ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, + ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, + ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, + ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, + ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, + ePhysicalDeviceImageFormatInfo2KHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, + ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, + ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, + ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, + ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, + ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR, + ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, + ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, + ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR, - eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR, - eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR, - ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT, - eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, - eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, + ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, + ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, + ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, + ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, + ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, + ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR, ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, - ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR, - eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, - eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR, - eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR, - eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR, - ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT + ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + ePhysicalDeviceVariablePointerFeaturesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, + ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, + ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, + eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, + eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, + eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, + eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, + eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, + eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, + eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, + eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR, + eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR, + eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR, + eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR, + eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR, + eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR, + eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR, + eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR, + eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, + eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, + eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, + eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR, + eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, + eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR, + eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV }; VULKAN_HPP_INLINE std::string to_string( StructureType value ) @@ -7896,6 +8751,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT : return "PhysicalDeviceTransformFeedbackPropertiesEXT"; case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT : return "PipelineRasterizationStateStreamCreateInfoEXT"; case StructureType::eImageViewHandleInfoNVX : return "ImageViewHandleInfoNVX"; + case StructureType::eImageViewAddressPropertiesNVX : return "ImageViewAddressPropertiesNVX"; case StructureType::eTextureLodGatherFormatPropertiesAMD : return "TextureLodGatherFormatPropertiesAMD"; case StructureType::eStreamDescriptorSurfaceCreateInfoGGP : return "StreamDescriptorSurfaceCreateInfoGGP"; case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV : return "PhysicalDeviceCornerSampledImageFeaturesNV"; @@ -7928,12 +8784,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT : return "PhysicalDeviceConditionalRenderingFeaturesEXT"; case StructureType::eConditionalRenderingBeginInfoEXT : return "ConditionalRenderingBeginInfoEXT"; case StructureType::ePresentRegionsKHR : return "PresentRegionsKHR"; - case StructureType::eObjectTableCreateInfoNVX : return 
"ObjectTableCreateInfoNVX"; - case StructureType::eIndirectCommandsLayoutCreateInfoNVX : return "IndirectCommandsLayoutCreateInfoNVX"; - case StructureType::eCmdProcessCommandsInfoNVX : return "CmdProcessCommandsInfoNVX"; - case StructureType::eCmdReserveSpaceForCommandsInfoNVX : return "CmdReserveSpaceForCommandsInfoNVX"; - case StructureType::eDeviceGeneratedCommandsLimitsNVX : return "DeviceGeneratedCommandsLimitsNVX"; - case StructureType::eDeviceGeneratedCommandsFeaturesNVX : return "DeviceGeneratedCommandsFeaturesNVX"; case StructureType::ePipelineViewportWScalingStateCreateInfoNV : return "PipelineViewportWScalingStateCreateInfoNV"; case StructureType::eSurfaceCapabilities2EXT : return "SurfaceCapabilities2EXT"; case StructureType::eDisplayPowerInfoEXT : return "DisplayPowerInfoEXT"; @@ -7997,6 +8847,26 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT : return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT : return "PipelineColorBlendAdvancedStateCreateInfoEXT"; case StructureType::ePipelineCoverageToColorStateCreateInfoNV : return "PipelineCoverageToColorStateCreateInfoNV"; + case StructureType::eBindAccelerationStructureMemoryInfoKHR : return "BindAccelerationStructureMemoryInfoKHR"; + case StructureType::eWriteDescriptorSetAccelerationStructureKHR : return "WriteDescriptorSetAccelerationStructureKHR"; + case StructureType::eAccelerationStructureBuildGeometryInfoKHR : return "AccelerationStructureBuildGeometryInfoKHR"; + case StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR : return "AccelerationStructureCreateGeometryTypeInfoKHR"; + case StructureType::eAccelerationStructureDeviceAddressInfoKHR : return "AccelerationStructureDeviceAddressInfoKHR"; + case StructureType::eAccelerationStructureGeometryAabbsDataKHR : return "AccelerationStructureGeometryAabbsDataKHR"; + case StructureType::eAccelerationStructureGeometryInstancesDataKHR : return "AccelerationStructureGeometryInstancesDataKHR"; + case StructureType::eAccelerationStructureGeometryTrianglesDataKHR : return "AccelerationStructureGeometryTrianglesDataKHR"; + case StructureType::eAccelerationStructureGeometryKHR : return "AccelerationStructureGeometryKHR"; + case StructureType::eAccelerationStructureMemoryRequirementsInfoKHR : return "AccelerationStructureMemoryRequirementsInfoKHR"; + case StructureType::eAccelerationStructureVersionKHR : return "AccelerationStructureVersionKHR"; + case StructureType::eCopyAccelerationStructureInfoKHR : return "CopyAccelerationStructureInfoKHR"; + case StructureType::eCopyAccelerationStructureToMemoryInfoKHR : return "CopyAccelerationStructureToMemoryInfoKHR"; + case StructureType::eCopyMemoryToAccelerationStructureInfoKHR : return "CopyMemoryToAccelerationStructureInfoKHR"; + case StructureType::ePhysicalDeviceRayTracingFeaturesKHR : return "PhysicalDeviceRayTracingFeaturesKHR"; + case StructureType::ePhysicalDeviceRayTracingPropertiesKHR : return "PhysicalDeviceRayTracingPropertiesKHR"; + case StructureType::eRayTracingPipelineCreateInfoKHR : return "RayTracingPipelineCreateInfoKHR"; + case StructureType::eRayTracingShaderGroupCreateInfoKHR : return "RayTracingShaderGroupCreateInfoKHR"; + case StructureType::eAccelerationStructureCreateInfoKHR : return "AccelerationStructureCreateInfoKHR"; + case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR : return "RayTracingPipelineInterfaceCreateInfoKHR"; case 
StructureType::ePipelineCoverageModulationStateCreateInfoNV : return "PipelineCoverageModulationStateCreateInfoNV"; case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV : return "PhysicalDeviceShaderSmBuiltinsFeaturesNV"; case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV : return "PhysicalDeviceShaderSmBuiltinsPropertiesNV"; @@ -8008,6 +8878,8 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eImageDrmFormatModifierPropertiesEXT : return "ImageDrmFormatModifierPropertiesEXT"; case StructureType::eValidationCacheCreateInfoEXT : return "ValidationCacheCreateInfoEXT"; case StructureType::eShaderModuleValidationCacheCreateInfoEXT : return "ShaderModuleValidationCacheCreateInfoEXT"; + case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR : return "PhysicalDevicePortabilitySubsetFeaturesKHR"; + case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR : return "PhysicalDevicePortabilitySubsetPropertiesKHR"; case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV"; case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV"; case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV"; @@ -8017,8 +8889,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eGeometryNV : return "GeometryNV"; case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV"; case StructureType::eGeometryAabbNV : return "GeometryAabbNV"; - case StructureType::eBindAccelerationStructureMemoryInfoNV : return "BindAccelerationStructureMemoryInfoNV"; - case StructureType::eWriteDescriptorSetAccelerationStructureNV : return "WriteDescriptorSetAccelerationStructureNV"; case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV"; case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV"; case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV"; @@ -8051,7 +8921,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eCheckpointDataNV : return "CheckpointDataNV"; case StructureType::eQueueFamilyCheckpointPropertiesNV : return "QueueFamilyCheckpointPropertiesNV"; case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL : return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; - case StructureType::eQueryPoolCreateInfoINTEL : return "QueryPoolCreateInfoINTEL"; + case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL : return "QueryPoolPerformanceQueryCreateInfoINTEL"; case StructureType::eInitializePerformanceApiInfoINTEL : return "InitializePerformanceApiInfoINTEL"; case StructureType::ePerformanceMarkerInfoINTEL : return "PerformanceMarkerInfoINTEL"; case StructureType::ePerformanceStreamMarkerInfoINTEL : return "PerformanceStreamMarkerInfoINTEL"; @@ -8094,7 +8964,10 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT : return "PhysicalDeviceLineRasterizationFeaturesEXT"; case StructureType::ePipelineRasterizationLineStateCreateInfoEXT : return "PipelineRasterizationLineStateCreateInfoEXT"; case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT : return "PhysicalDeviceLineRasterizationPropertiesEXT"; + case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT : return "PhysicalDeviceShaderAtomicFloatFeaturesEXT"; case 
StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT : return "PhysicalDeviceIndexTypeUint8FeaturesEXT"; + case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT : return "PhysicalDeviceExtendedDynamicStateFeaturesEXT"; + case StructureType::eDeferredOperationInfoKHR : return "DeferredOperationInfoKHR"; case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR : return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR"; case StructureType::ePipelineInfoKHR : return "PipelineInfoKHR"; case StructureType::ePipelineExecutablePropertiesKHR : return "PipelineExecutablePropertiesKHR"; @@ -8102,13 +8975,51 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePipelineExecutableStatisticKHR : return "PipelineExecutableStatisticKHR"; case StructureType::ePipelineExecutableInternalRepresentationKHR : return "PipelineExecutableInternalRepresentationKHR"; case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT : return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV : return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV"; + case StructureType::eGraphicsShaderGroupCreateInfoNV : return "GraphicsShaderGroupCreateInfoNV"; + case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV : return "GraphicsPipelineShaderGroupsCreateInfoNV"; + case StructureType::eIndirectCommandsLayoutTokenNV : return "IndirectCommandsLayoutTokenNV"; + case StructureType::eIndirectCommandsLayoutCreateInfoNV : return "IndirectCommandsLayoutCreateInfoNV"; + case StructureType::eGeneratedCommandsInfoNV : return "GeneratedCommandsInfoNV"; + case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV : return "GeneratedCommandsMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV : return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV"; case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT : return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT"; case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT : return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT"; + case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM : return "CommandBufferInheritanceRenderPassTransformInfoQCOM"; + case StructureType::eRenderPassTransformBeginInfoQCOM : return "RenderPassTransformBeginInfoQCOM"; + case StructureType::ePhysicalDeviceRobustness2FeaturesEXT : return "PhysicalDeviceRobustness2FeaturesEXT"; + case StructureType::ePhysicalDeviceRobustness2PropertiesEXT : return "PhysicalDeviceRobustness2PropertiesEXT"; + case StructureType::eSamplerCustomBorderColorCreateInfoEXT : return "SamplerCustomBorderColorCreateInfoEXT"; + case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT : return "PhysicalDeviceCustomBorderColorPropertiesEXT"; + case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT : return "PhysicalDeviceCustomBorderColorFeaturesEXT"; + case StructureType::ePipelineLibraryCreateInfoKHR : return "PipelineLibraryCreateInfoKHR"; + case StructureType::ePhysicalDevicePrivateDataFeaturesEXT : return "PhysicalDevicePrivateDataFeaturesEXT"; + case StructureType::eDevicePrivateDataCreateInfoEXT : return "DevicePrivateDataCreateInfoEXT"; + case StructureType::ePrivateDataSlotCreateInfoEXT : return "PrivateDataSlotCreateInfoEXT"; + case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT : return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT"; + case 
StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV : return "PhysicalDeviceDiagnosticsConfigFeaturesNV"; + case StructureType::eDeviceDiagnosticsConfigCreateInfoNV : return "DeviceDiagnosticsConfigCreateInfoNV"; + case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT : return "PhysicalDeviceFragmentDensityMap2FeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT : return "PhysicalDeviceFragmentDensityMap2PropertiesEXT"; + case StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT : return "PhysicalDeviceImageRobustnessFeaturesEXT"; + case StructureType::eCopyBufferInfo2KHR : return "CopyBufferInfo2KHR"; + case StructureType::eCopyImageInfo2KHR : return "CopyImageInfo2KHR"; + case StructureType::eCopyBufferToImageInfo2KHR : return "CopyBufferToImageInfo2KHR"; + case StructureType::eCopyImageToBufferInfo2KHR : return "CopyImageToBufferInfo2KHR"; + case StructureType::eBlitImageInfo2KHR : return "BlitImageInfo2KHR"; + case StructureType::eResolveImageInfo2KHR : return "ResolveImageInfo2KHR"; + case StructureType::eBufferCopy2KHR : return "BufferCopy2KHR"; + case StructureType::eImageCopy2KHR : return "ImageCopy2KHR"; + case StructureType::eImageBlit2KHR : return "ImageBlit2KHR"; + case StructureType::eBufferImageCopy2KHR : return "BufferImageCopy2KHR"; + case StructureType::eImageResolve2KHR : return "ImageResolve2KHR"; + case StructureType::ePhysicalDevice4444FormatsFeaturesEXT : return "PhysicalDevice4444FormatsFeaturesEXT"; + case StructureType::eDirectfbSurfaceCreateInfoEXT : return "DirectfbSurfaceCreateInfoEXT"; default: return "invalid"; } } - enum class SubgroupFeatureFlagBits + enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags { eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT, eVote = VK_SUBGROUP_FEATURE_VOTE_BIT, @@ -8154,10 +9065,12 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class SubpassDescriptionFlagBits + enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags { ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX, - ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX + ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, + eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, + eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM }; VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value ) @@ -8166,11 +9079,13 @@ namespace VULKAN_HPP_NAMESPACE { case SubpassDescriptionFlagBits::ePerViewAttributesNVX : return "PerViewAttributesNVX"; case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX : return "PerViewPositionXOnlyNVX"; + case SubpassDescriptionFlagBits::eFragmentRegionQCOM : return "FragmentRegionQCOM"; + case SubpassDescriptionFlagBits::eShaderResolveQCOM : return "ShaderResolveQCOM"; default: return "invalid"; } } - enum class SurfaceCounterFlagBitsEXT + enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT { eVblank = VK_SURFACE_COUNTER_VBLANK_EXT }; @@ -8184,7 +9099,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class SurfaceTransformFlagBitsKHR + enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR { eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, @@ -8214,7 +9129,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class SwapchainCreateFlagBitsKHR + enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR { eSplitInstanceBindRegions = 
VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, @@ -8291,7 +9206,7 @@ namespace VULKAN_HPP_NAMESPACE } } - enum class ToolPurposeFlagBitsEXT + enum class ToolPurposeFlagBitsEXT : VkToolPurposeFlagsEXT { eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT, eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT_EXT, @@ -8377,7 +9292,9 @@ namespace VULKAN_HPP_NAMESPACE { eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT, - eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT + eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, + eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT, + eSynchronizationValidation = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT }; VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value ) @@ -8387,6 +9304,8 @@ namespace VULKAN_HPP_NAMESPACE case ValidationFeatureEnableEXT::eGpuAssisted : return "GpuAssisted"; case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot : return "GpuAssistedReserveBindingSlot"; case ValidationFeatureEnableEXT::eBestPractices : return "BestPractices"; + case ValidationFeatureEnableEXT::eDebugPrintf : return "DebugPrintf"; + case ValidationFeatureEnableEXT::eSynchronizationValidation : return "SynchronizationValidation"; default: return "invalid"; } } @@ -8395,7 +9314,9 @@ namespace VULKAN_HPP_NAMESPACE { eVIV = VK_VENDOR_ID_VIV, eVSI = VK_VENDOR_ID_VSI, - eKazan = VK_VENDOR_ID_KAZAN + eKazan = VK_VENDOR_ID_KAZAN, + eCodeplay = VK_VENDOR_ID_CODEPLAY, + eMESA = VK_VENDOR_ID_MESA }; VULKAN_HPP_INLINE std::string to_string( VendorId value ) @@ -8405,6 +9326,8 @@ namespace VULKAN_HPP_NAMESPACE case VendorId::eVIV : return "VIV"; case VendorId::eVSI : return "VSI"; case VendorId::eKazan : return "Kazan"; + case VendorId::eCodeplay : return "Codeplay"; + case VendorId::eMESA : return "MESA"; default: return "invalid"; } } @@ -8453,18 +9376,54 @@ namespace VULKAN_HPP_NAMESPACE } } - template - struct cpp_type + template + struct IndexTypeValue + {}; + + template <> + struct IndexTypeValue { + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16; }; - using AccessFlags = Flags; + template <> + struct CppType + { + using Type = uint16_t; + }; + + template <> + struct IndexTypeValue + { + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32; + }; + + template <> + struct CppType + { + using Type = uint32_t; + }; + + template <> + struct IndexTypeValue + { + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8EXT; + }; + + template <> + struct CppType + { + using Type = uint8_t; + }; + + + using AccessFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { - allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | 
VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureWriteNV) | VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) + allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eAccelerationStructureReadKHR) | VkFlags(AccessFlagBits::eAccelerationStructureWriteKHR) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) | VkFlags(AccessFlagBits::eCommandPreprocessReadNV) | VkFlags(AccessFlagBits::eCommandPreprocessWriteNV) }; }; @@ -8490,6 +9449,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( AccessFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -8514,25 +9474,27 @@ namespace VULKAN_HPP_NAMESPACE if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) result += "TransformFeedbackCounterReadEXT | "; if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) result += "TransformFeedbackCounterWriteEXT | "; if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) result += "ConditionalRenderingReadEXT | "; - if ( value & AccessFlagBits::eCommandProcessReadNVX ) result += "CommandProcessReadNVX | "; - if ( value & AccessFlagBits::eCommandProcessWriteNVX ) result += "CommandProcessWriteNVX | "; if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | "; + if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) result += "AccelerationStructureReadKHR | "; + if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) result += "AccelerationStructureWriteKHR | "; if ( value & AccessFlagBits::eShadingRateImageReadNV ) result += "ShadingRateImageReadNV | "; - if ( value & AccessFlagBits::eAccelerationStructureReadNV ) result += "AccelerationStructureReadNV | "; - if ( value & AccessFlagBits::eAccelerationStructureWriteNV ) result += "AccelerationStructureWriteNV | "; if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | "; + 
if ( value & AccessFlagBits::eCommandPreprocessReadNV ) result += "CommandPreprocessReadNV | "; + if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) result += "CommandPreprocessWriteNV | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - using AcquireProfilingLockFlagsKHR = Flags; + + using AcquireProfilingLockFlagsKHR = Flags; VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR ) { + return "{}"; } #ifdef VK_USE_PLATFORM_ANDROID_KHR - enum class AndroidSurfaceCreateFlagBitsKHR + enum class AndroidSurfaceCreateFlagBitsKHR : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR ) @@ -8540,19 +9502,21 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using AndroidSurfaceCreateFlagsKHR = Flags; + using AndroidSurfaceCreateFlagsKHR = Flags; VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR ) { + return "{}"; } #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - using AttachmentDescriptionFlags = Flags; + + using AttachmentDescriptionFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias) }; @@ -8580,6 +9544,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -8587,11 +9552,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using BufferCreateFlags = Flags; + + using BufferCreateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected) | VkFlags(BufferCreateFlagBits::eDeviceAddressCaptureReplay) }; @@ -8619,6 +9585,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -8630,13 +9597,14 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using BufferUsageFlags = Flags; + + using BufferUsageFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { - allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddress) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingNV) + allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddress) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | 
VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingKHR) }; }; @@ -8662,6 +9630,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -8678,65 +9647,79 @@ namespace VULKAN_HPP_NAMESPACE if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += "TransformFeedbackBufferEXT | "; if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | "; if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; - if ( value & BufferUsageFlagBits::eRayTracingNV ) result += "RayTracingNV | "; + if ( value & BufferUsageFlagBits::eRayTracingKHR ) result += "RayTracingKHR | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - using BufferViewCreateFlags = Flags; + enum class BufferViewCreateFlagBits : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits ) + { + return "(void)"; + } + + using BufferViewCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags ) { + return "{}"; } - using BuildAccelerationStructureFlagsNV = Flags; - template <> struct FlagTraits + using BuildAccelerationStructureFlagsKHR = Flags; + + template <> struct FlagTraits { - enum + enum : VkFlags { - allFlags = VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsNV::eLowMemory) + allFlags = VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsKHR::eLowMemory) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator|( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return BuildAccelerationStructureFlagsNV( bit0 ) | bit1; + return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator&( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator&( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return BuildAccelerationStructureFlagsNV( bit0 ) & bit1; + return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator^( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { 
- return BuildAccelerationStructureFlagsNV( bit0 ) ^ bit1; + return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator~( BuildAccelerationStructureFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT { - return ~( BuildAccelerationStructureFlagsNV( bits ) ); + return ~( BuildAccelerationStructureFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsNV value ) + using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value ) { + if ( !value ) return "{}"; std::string result; - if ( value & BuildAccelerationStructureFlagBitsNV::eAllowUpdate ) result += "AllowUpdate | "; - if ( value & BuildAccelerationStructureFlagBitsNV::eAllowCompaction ) result += "AllowCompaction | "; - if ( value & BuildAccelerationStructureFlagBitsNV::ePreferFastTrace ) result += "PreferFastTrace | "; - if ( value & BuildAccelerationStructureFlagBitsNV::ePreferFastBuild ) result += "PreferFastBuild | "; - if ( value & BuildAccelerationStructureFlagBitsNV::eLowMemory ) result += "LowMemory | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) result += "AllowUpdate | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) result += "AllowCompaction | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) result += "PreferFastTrace | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) result += "PreferFastBuild | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) result += "LowMemory | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - using ColorComponentFlags = Flags; + + using ColorComponentFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA) }; @@ -8764,6 +9747,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -8774,11 +9758,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using CommandBufferResetFlags = Flags; + + using CommandBufferResetFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources) }; @@ -8806,6 +9791,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -8813,11 +9799,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using CommandBufferUsageFlags = Flags; + + using CommandBufferUsageFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse) }; @@ -8845,6 +9832,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -8854,11 +9842,12 @@ namespace VULKAN_HPP_NAMESPACE 
return "{ " + result.substr(0, result.size() - 3) + " }"; } - using CommandPoolCreateFlags = Flags; + + using CommandPoolCreateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) | VkFlags(CommandPoolCreateFlagBits::eProtected) }; @@ -8886,6 +9875,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -8895,11 +9885,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using CommandPoolResetFlags = Flags; + + using CommandPoolResetFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources) }; @@ -8927,6 +9918,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -8934,7 +9926,7 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - enum class CommandPoolTrimFlagBits + enum class CommandPoolTrimFlagBits : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits ) @@ -8942,20 +9934,22 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using CommandPoolTrimFlags = Flags; + using CommandPoolTrimFlags = Flags; using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags; VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags ) { + return "{}"; } - using CompositeAlphaFlagsKHR = Flags; + + using CompositeAlphaFlagsKHR = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit) }; @@ -8983,6 +9977,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value ) { + if ( !value ) return "{}"; std::string result; @@ -8993,11 +9988,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using ConditionalRenderingFlagsEXT = Flags; + + using ConditionalRenderingFlagsEXT = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ConditionalRenderingFlagBitsEXT::eInverted) }; @@ -9025,6 +10021,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value ) { + if ( !value ) return "{}"; std::string result; @@ -9032,11 +10029,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using CullModeFlags = Flags; + + using CullModeFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack) }; @@ -9064,6 +10062,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( CullModeFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9072,11 +10071,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using DebugReportFlagsEXT = Flags; + + using DebugReportFlagsEXT = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | 
VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug) }; @@ -9104,6 +10104,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value ) { + if ( !value ) return "{}"; std::string result; @@ -9115,11 +10116,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using DebugUtilsMessageSeverityFlagsEXT = Flags; + + using DebugUtilsMessageSeverityFlagsEXT = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError) }; @@ -9147,6 +10149,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value ) { + if ( !value ) return "{}"; std::string result; @@ -9157,11 +10160,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using DebugUtilsMessageTypeFlagsEXT = Flags; + + using DebugUtilsMessageTypeFlagsEXT = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance) }; @@ -9189,6 +10193,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value ) { + if ( !value ) return "{}"; std::string result; @@ -9198,7 +10203,7 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - enum class DebugUtilsMessengerCallbackDataFlagBitsEXT + enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT ) @@ -9206,14 +10211,15 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using DebugUtilsMessengerCallbackDataFlagsEXT = Flags; + using DebugUtilsMessengerCallbackDataFlagsEXT = Flags; VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT ) { + return "{}"; } - enum class DebugUtilsMessengerCreateFlagBitsEXT + enum class DebugUtilsMessengerCreateFlagBitsEXT : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT ) @@ -9221,18 +10227,20 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using DebugUtilsMessengerCreateFlagsEXT = Flags; + using DebugUtilsMessengerCreateFlagsEXT = Flags; VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT ) { + return "{}"; } - using DependencyFlags = Flags; + + using DependencyFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eDeviceGroup) | VkFlags(DependencyFlagBits::eViewLocal) }; @@ -9260,6 +10268,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( DependencyFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9269,11 +10278,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using DescriptorBindingFlags = Flags; + + using DescriptorBindingFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(DescriptorBindingFlagBits::eUpdateAfterBind) | 
VkFlags(DescriptorBindingFlagBits::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBits::ePartiallyBound) | VkFlags(DescriptorBindingFlagBits::eVariableDescriptorCount) }; @@ -9303,6 +10313,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9313,11 +10324,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using DescriptorPoolCreateFlags = Flags; + + using DescriptorPoolCreateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBind) }; @@ -9345,6 +10357,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9353,7 +10366,7 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - enum class DescriptorPoolResetFlagBits + enum class DescriptorPoolResetFlagBits : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits ) @@ -9361,18 +10374,20 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using DescriptorPoolResetFlags = Flags; + using DescriptorPoolResetFlags = Flags; VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags ) { + return "{}"; } - using DescriptorSetLayoutCreateFlags = Flags; + + using DescriptorSetLayoutCreateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool) | VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) }; @@ -9400,6 +10415,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9408,7 +10424,7 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - enum class DescriptorUpdateTemplateCreateFlagBits + enum class DescriptorUpdateTemplateCreateFlagBits : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits ) @@ -9416,27 +10432,74 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using DescriptorUpdateTemplateCreateFlags = Flags; + using DescriptorUpdateTemplateCreateFlags = Flags; using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags; VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags ) { + return "{}"; } - using DeviceCreateFlags = Flags; + + using DeviceCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags ) { + return "{}"; } - using DeviceGroupPresentModeFlagsKHR = Flags; + + using DeviceDiagnosticsConfigFlagsNV = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo) | VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking) | VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator&( DeviceDiagnosticsConfigFlagBitsNV bit0, 
DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DeviceDiagnosticsConfigFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value ) + { + + if ( !value ) return "{}"; + std::string result; + + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) result += "EnableShaderDebugInfo | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) result += "EnableResourceTracking | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) result += "EnableAutomaticCheckpoints | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + + using DeviceGroupPresentModeFlagsKHR = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice) }; @@ -9464,6 +10527,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value ) { + if ( !value ) return "{}"; std::string result; @@ -9474,11 +10538,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using DeviceQueueCreateFlags = Flags; + + using DeviceQueueCreateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected) }; @@ -9506,6 +10571,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9513,7 +10579,25 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - enum class DisplayModeCreateFlagBitsKHR +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + enum class DirectFBSurfaceCreateFlagBitsEXT : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } + + using DirectFBSurfaceCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagsEXT ) + { + + return "{}"; + } +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + enum class DisplayModeCreateFlagBitsKHR : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR ) @@ -9521,18 +10605,20 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using DisplayModeCreateFlagsKHR = Flags; + using DisplayModeCreateFlagsKHR = Flags; VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR ) { + return "{}"; } - using DisplayPlaneAlphaFlagsKHR = Flags; + + using DisplayPlaneAlphaFlagsKHR = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) }; @@ -9560,6 +10646,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string 
to_string( DisplayPlaneAlphaFlagsKHR value ) { + if ( !value ) return "{}"; std::string result; @@ -9570,7 +10657,7 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - enum class DisplaySurfaceCreateFlagBitsKHR + enum class DisplaySurfaceCreateFlagBitsKHR : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR ) @@ -9578,14 +10665,15 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using DisplaySurfaceCreateFlagsKHR = Flags; + using DisplaySurfaceCreateFlagsKHR = Flags; VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR ) { + return "{}"; } - enum class EventCreateFlagBits + enum class EventCreateFlagBits : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits ) @@ -9593,18 +10681,20 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using EventCreateFlags = Flags; + using EventCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( EventCreateFlags ) { + return "{}"; } - using ExternalFenceFeatureFlags = Flags; + + using ExternalFenceFeatureFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) | VkFlags(ExternalFenceFeatureFlagBits::eImportable) }; @@ -9634,6 +10724,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9642,11 +10733,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using ExternalFenceHandleTypeFlags = Flags; + + using ExternalFenceHandleTypeFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd) }; @@ -9676,6 +10768,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9686,11 +10779,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using ExternalMemoryFeatureFlags = Flags; + + using ExternalMemoryFeatureFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBits::eExportable) | VkFlags(ExternalMemoryFeatureFlagBits::eImportable) }; @@ -9720,6 +10814,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9729,11 +10824,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using ExternalMemoryFeatureFlagsNV = Flags; + + using ExternalMemoryFeatureFlagsNV = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable) }; @@ -9761,6 +10857,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value ) { + if ( !value ) return "{}"; std::string result; @@ -9770,11 +10867,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using ExternalMemoryHandleTypeFlags = 
Flags; + + using ExternalMemoryHandleTypeFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) | VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT) }; @@ -9804,6 +10902,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9821,11 +10920,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using ExternalMemoryHandleTypeFlagsNV = Flags; + + using ExternalMemoryHandleTypeFlagsNV = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) }; @@ -9853,6 +10953,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value ) { + if ( !value ) return "{}"; std::string result; @@ -9863,11 +10964,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using ExternalSemaphoreFeatureFlags = Flags; + + using ExternalSemaphoreFeatureFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable) }; @@ -9897,6 +10999,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9905,11 +11008,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using ExternalSemaphoreHandleTypeFlags = Flags; + + using ExternalSemaphoreHandleTypeFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd) }; @@ -9939,6 +11043,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9950,11 +11055,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using FenceCreateFlags = Flags; + + using FenceCreateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(FenceCreateFlagBits::eSignaled) }; @@ -9982,6 +11088,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -9989,11 +11096,12 @@ 
namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using FenceImportFlags = Flags; + + using FenceImportFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(FenceImportFlagBits::eTemporary) }; @@ -10023,6 +11131,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -10030,13 +11139,14 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using FormatFeatureFlags = Flags; + + using FormatFeatureFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { - allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmax) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT) + allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmax) | 
VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT) }; }; @@ -10062,6 +11172,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -10089,15 +11200,17 @@ namespace VULKAN_HPP_NAMESPACE if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) result += "CositedChromaSamples | "; if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) result += "SampledImageFilterMinmax | "; if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) result += "SampledImageFilterCubicIMG | "; + if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) result += "AccelerationStructureVertexBufferKHR | "; if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - using FramebufferCreateFlags = Flags; + + using FramebufferCreateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(FramebufferCreateFlagBits::eImageless) }; @@ -10125,6 +11238,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -10132,89 +11246,97 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using GeometryFlagsNV = Flags; - template <> struct FlagTraits + using GeometryFlagsKHR = Flags; + + template <> struct FlagTraits { - enum + enum : VkFlags { - allFlags = VkFlags(GeometryFlagBitsNV::eOpaque) | VkFlags(GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation) + allFlags = VkFlags(GeometryFlagBitsKHR::eOpaque) | VkFlags(GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator|( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return GeometryFlagsNV( bit0 ) | bit1; + return GeometryFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator&( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return GeometryFlagsNV( bit0 ) & bit1; + return GeometryFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator^( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return GeometryFlagsNV( bit0 ) ^ bit1; + return GeometryFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator~( GeometryFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator~( GeometryFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT { - return ~( GeometryFlagsNV( bits ) ); + return ~( GeometryFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( GeometryFlagsNV value ) + using GeometryFlagsNV = GeometryFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value ) { + if ( !value ) return "{}"; std::string 
result; - if ( value & GeometryFlagBitsNV::eOpaque ) result += "Opaque | "; - if ( value & GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | "; + if ( value & GeometryFlagBitsKHR::eOpaque ) result += "Opaque | "; + if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - using GeometryInstanceFlagsNV = Flags; - template <> struct FlagTraits + using GeometryInstanceFlagsKHR = Flags; + + template <> struct FlagTraits { - enum + enum : VkFlags { - allFlags = VkFlags(GeometryInstanceFlagBitsNV::eTriangleCullDisable) | VkFlags(GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsNV::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsNV::eForceNoOpaque) + allFlags = VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable) | VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsKHR::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsKHR::eForceNoOpaque) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator|( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return GeometryInstanceFlagsNV( bit0 ) | bit1; + return GeometryInstanceFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator&( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator&( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return GeometryInstanceFlagsNV( bit0 ) & bit1; + return GeometryInstanceFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator^( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return GeometryInstanceFlagsNV( bit0 ) ^ bit1; + return GeometryInstanceFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator~( GeometryInstanceFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT { - return ~( GeometryInstanceFlagsNV( bits ) ); + return ~( GeometryInstanceFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsNV value ) + using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value ) { + if ( !value ) return "{}"; std::string result; - if ( value & GeometryInstanceFlagBitsNV::eTriangleCullDisable ) result += "TriangleCullDisable | "; - if ( value & GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | "; - if ( value & GeometryInstanceFlagBitsNV::eForceOpaque ) result += "ForceOpaque | "; - if ( value & GeometryInstanceFlagBitsNV::eForceNoOpaque ) result += "ForceNoOpaque | "; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) result += "TriangleFacingCullDisable | "; + if 
( value & GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) result += "ForceOpaque | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) result += "ForceNoOpaque | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - enum class HeadlessSurfaceCreateFlagBitsEXT + enum class HeadlessSurfaceCreateFlagBitsEXT : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT ) @@ -10222,15 +11344,16 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using HeadlessSurfaceCreateFlagsEXT = Flags; + using HeadlessSurfaceCreateFlagsEXT = Flags; VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT ) { + return "{}"; } #ifdef VK_USE_PLATFORM_IOS_MVK - enum class IOSSurfaceCreateFlagBitsMVK + enum class IOSSurfaceCreateFlagBitsMVK : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK ) @@ -10238,19 +11361,21 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using IOSSurfaceCreateFlagsMVK = Flags; + using IOSSurfaceCreateFlagsMVK = Flags; VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK ) { + return "{}"; } #endif /*VK_USE_PLATFORM_IOS_MVK*/ - using ImageAspectFlags = Flags; + + using ImageAspectFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata) | VkFlags(ImageAspectFlagBits::ePlane0) | VkFlags(ImageAspectFlagBits::ePlane1) | VkFlags(ImageAspectFlagBits::ePlane2) | VkFlags(ImageAspectFlagBits::eMemoryPlane0EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane1EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane2EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane3EXT) }; @@ -10278,6 +11403,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -10295,11 +11421,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using ImageCreateFlags = Flags; + + using ImageCreateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::eAlias) | VkFlags(ImageCreateFlagBits::eSplitInstanceBindRegions) | VkFlags(ImageCreateFlagBits::e2DArrayCompatible) | VkFlags(ImageCreateFlagBits::eBlockTexelViewCompatible) | VkFlags(ImageCreateFlagBits::eExtendedUsage) | VkFlags(ImageCreateFlagBits::eProtected) | VkFlags(ImageCreateFlagBits::eDisjoint) | VkFlags(ImageCreateFlagBits::eCornerSampledNV) | VkFlags(ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) | VkFlags(ImageCreateFlagBits::eSubsampledEXT) }; @@ -10327,6 +11454,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -10349,7 +11477,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifdef VK_USE_PLATFORM_FUCHSIA - enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA + enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA ) @@ -10357,19 +11485,21 @@ 
namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags; + using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags; VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA ) { + return "{}"; } #endif /*VK_USE_PLATFORM_FUCHSIA*/ - using ImageUsageFlags = Flags; + + using ImageUsageFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment) | VkFlags(ImageUsageFlagBits::eShadingRateImageNV) | VkFlags(ImageUsageFlagBits::eFragmentDensityMapEXT) }; @@ -10397,6 +11527,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -10413,13 +11544,14 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using ImageViewCreateFlags = Flags; + + using ImageViewCreateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { - allFlags = VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT) + allFlags = VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT) | VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT) }; }; @@ -10445,64 +11577,110 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value ) { + if ( !value ) return "{}"; std::string result; if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) result += "FragmentDensityMapDynamicEXT | "; + if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) result += "FragmentDensityMapDeferredEXT | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - using IndirectCommandsLayoutUsageFlagsNVX = Flags; - template <> struct FlagTraits + using IndirectCommandsLayoutUsageFlagsNV = Flags; + + template <> struct FlagTraits { - enum + enum : VkFlags { - allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) + allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { - return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1; + return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator&( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator&( 
IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { - return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) & bit1; + return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator^( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT { - return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) ^ bit1; + return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT { - return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) ); + return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNVX value ) + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value ) { + if ( !value ) return "{}"; std::string result; - if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences ) result += "UnorderedSequences | "; - if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences ) result += "SparseSequences | "; - if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions ) result += "EmptyExecutions | "; - if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences ) result += "IndexedSequences | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) result += "ExplicitPreprocess | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) result += "IndexedSequences | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) result += "UnorderedSequences | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - using InstanceCreateFlags = Flags; + + using IndirectStateFlagsNV = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(IndirectStateFlagBitsNV::eFlagFrontface) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectStateFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator&( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectStateFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectStateFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + { + return ~( IndirectStateFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value ) + { + + if ( !value ) return "{}"; + std::string result; + + if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) result += "FlagFrontface | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + + using 
InstanceCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags ) { + return "{}"; } #ifdef VK_USE_PLATFORM_MACOS_MVK - enum class MacOSSurfaceCreateFlagBitsMVK + enum class MacOSSurfaceCreateFlagBitsMVK : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK ) @@ -10510,19 +11688,21 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using MacOSSurfaceCreateFlagsMVK = Flags; + using MacOSSurfaceCreateFlagsMVK = Flags; VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK ) { + return "{}"; } #endif /*VK_USE_PLATFORM_MACOS_MVK*/ - using MemoryAllocateFlags = Flags; + + using MemoryAllocateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask) | VkFlags(MemoryAllocateFlagBits::eDeviceAddress) | VkFlags(MemoryAllocateFlagBits::eDeviceAddressCaptureReplay) }; @@ -10552,6 +11732,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -10561,11 +11742,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using MemoryHeapFlags = Flags; + + using MemoryHeapFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstance) }; @@ -10593,6 +11775,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -10601,7 +11784,7 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - enum class MemoryMapFlagBits + enum class MemoryMapFlagBits : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits ) @@ -10609,18 +11792,20 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using MemoryMapFlags = Flags; + using MemoryMapFlags = Flags; VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags ) { + return "{}"; } - using MemoryPropertyFlags = Flags; + + using MemoryPropertyFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) | VkFlags(MemoryPropertyFlagBits::eProtected) | VkFlags(MemoryPropertyFlagBits::eDeviceCoherentAMD) | VkFlags(MemoryPropertyFlagBits::eDeviceUncachedAMD) }; @@ -10648,6 +11833,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -10663,7 +11849,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifdef VK_USE_PLATFORM_METAL_EXT - enum class MetalSurfaceCreateFlagBitsEXT + enum class MetalSurfaceCreateFlagBitsEXT : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT ) @@ -10671,59 +11857,21 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using MetalSurfaceCreateFlagsEXT = Flags; + using MetalSurfaceCreateFlagsEXT = Flags; VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT ) { + return "{}"; } #endif /*VK_USE_PLATFORM_METAL_EXT*/ - using ObjectEntryUsageFlagsNVX = Flags; - template <> struct FlagTraits - { - enum - { - allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute) - }; - }; 
- - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT - { - return ObjectEntryUsageFlagsNVX( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator&( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT - { - return ObjectEntryUsageFlagsNVX( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator^( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT - { - return ObjectEntryUsageFlagsNVX( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ObjectEntryUsageFlagsNVX( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagsNVX value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & ObjectEntryUsageFlagBitsNVX::eGraphics ) result += "Graphics | "; - if ( value & ObjectEntryUsageFlagBitsNVX::eCompute ) result += "Compute | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - using PeerMemoryFeatureFlags = Flags; + using PeerMemoryFeatureFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBits::eGenericDst) }; @@ -10753,6 +11901,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -10763,11 +11912,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using PerformanceCounterDescriptionFlagsKHR = Flags; + + using PerformanceCounterDescriptionFlagsKHR = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting) | VkFlags(PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted) }; @@ -10795,6 +11945,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value ) { + if ( !value ) return "{}"; std::string result; @@ -10803,28 +11954,73 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using PipelineCacheCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags ) + using PipelineCacheCreateFlags = Flags; + + template <> struct FlagTraits { - return "{}"; + enum : VkFlags + { + allFlags = VkFlags(PipelineCacheCreateFlagBits::eExternallySynchronizedEXT) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCacheCreateFlags( bit0 ) | bit1; } - using PipelineColorBlendStateCreateFlags = Flags; + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator&( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCacheCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCacheCreateFlags( bit0 ) ^ bit1; + } + + 
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineCacheCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value ) + { + + if ( !value ) return "{}"; + std::string result; + + if ( value & PipelineCacheCreateFlagBits::eExternallySynchronizedEXT ) result += "ExternallySynchronizedEXT | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + enum class PipelineColorBlendStateCreateFlagBits : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineColorBlendStateCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags ) { + return "{}"; } - using PipelineCompilerControlFlagsAMD = Flags; + + using PipelineCompilerControlFlagsAMD = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD ) { + return "{}"; } - enum class PipelineCoverageModulationStateCreateFlagBitsNV + enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV ) @@ -10832,14 +12028,15 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using PipelineCoverageModulationStateCreateFlagsNV = Flags; + using PipelineCoverageModulationStateCreateFlagsNV = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV ) { + return "{}"; } - enum class PipelineCoverageReductionStateCreateFlagBitsNV + enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV ) @@ -10847,14 +12044,15 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using PipelineCoverageReductionStateCreateFlagsNV = Flags; + using PipelineCoverageReductionStateCreateFlagsNV = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV ) { + return "{}"; } - enum class PipelineCoverageToColorStateCreateFlagBitsNV + enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV ) @@ -10862,20 +12060,22 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using PipelineCoverageToColorStateCreateFlagsNV = Flags; + using PipelineCoverageToColorStateCreateFlagsNV = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV ) { + return "{}"; } - using PipelineCreateFlags = Flags; + + using PipelineCreateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { - allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) | VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) | VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR) + allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR) | 
VkFlags(PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingSkipAabbsKHR) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) | VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) | VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR) | VkFlags(PipelineCreateFlagBits::eIndirectBindableNV) | VkFlags(PipelineCreateFlagBits::eLibraryKHR) | VkFlags(PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT) | VkFlags(PipelineCreateFlagBits::eEarlyReturnOnFailureEXT) }; }; @@ -10901,6 +12101,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -10909,17 +12110,28 @@ namespace VULKAN_HPP_NAMESPACE if ( value & PipelineCreateFlagBits::eDerivative ) result += "Derivative | "; if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | "; if ( value & PipelineCreateFlagBits::eDispatchBase ) result += "DispatchBase | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) result += "RayTracingNoNullAnyHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) result += "RayTracingNoNullClosestHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) result += "RayTracingNoNullMissShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) result += "RayTracingNoNullIntersectionShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) result += "RayTracingSkipTrianglesKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) result += "RayTracingSkipAabbsKHR | "; if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | "; if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | "; if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) result += "CaptureInternalRepresentationsKHR | "; + if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) result += "IndirectBindableNV | "; + if ( value & PipelineCreateFlagBits::eLibraryKHR ) result += "LibraryKHR | "; + if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) result += "FailOnPipelineCompileRequiredEXT | "; + if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) result += "EarlyReturnOnFailureEXT | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - using PipelineCreationFeedbackFlagsEXT = Flags; + + using PipelineCreationFeedbackFlagsEXT = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(PipelineCreationFeedbackFlagBitsEXT::eValid) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration) }; @@ -10947,6 +12159,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value ) { + if ( !value ) return "{}"; std::string result; @@ -10956,14 +12169,23 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using PipelineDepthStencilStateCreateFlags = Flags; + enum class PipelineDepthStencilStateCreateFlagBits : VkFlags + 
{}; + + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineDepthStencilStateCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags ) { + return "{}"; } - enum class PipelineDiscardRectangleStateCreateFlagBitsEXT + enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT ) @@ -10971,42 +12193,79 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; + using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT ) { + return "{}"; } - using PipelineDynamicStateCreateFlags = Flags; + enum class PipelineDynamicStateCreateFlagBits : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineDynamicStateCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags ) { + return "{}"; } - using PipelineInputAssemblyStateCreateFlags = Flags; + enum class PipelineInputAssemblyStateCreateFlagBits : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineInputAssemblyStateCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags ) { + return "{}"; } - using PipelineLayoutCreateFlags = Flags; + enum class PipelineLayoutCreateFlagBits : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits ) + { + return "(void)"; + } + + using PipelineLayoutCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags ) { + return "{}"; } - using PipelineMultisampleStateCreateFlags = Flags; + enum class PipelineMultisampleStateCreateFlagBits : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineMultisampleStateCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags ) { + return "{}"; } - enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT + enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT ) @@ -11014,14 +12273,15 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags; + using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT ) { + return "{}"; } - enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT + enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT ) @@ -11029,21 +12289,31 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; + using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT ) { + return "{}"; } - using PipelineRasterizationStateCreateFlags = Flags; + enum class PipelineRasterizationStateCreateFlagBits : VkFlags 
+ {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineRasterizationStateCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags ) { + return "{}"; } - enum class PipelineRasterizationStateStreamCreateFlagBitsEXT + enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT ) @@ -11051,18 +12321,20 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using PipelineRasterizationStateStreamCreateFlagsEXT = Flags; + using PipelineRasterizationStateStreamCreateFlagsEXT = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT ) { + return "{}"; } - using PipelineShaderStageCreateFlags = Flags; + + using PipelineShaderStageCreateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT) | VkFlags(PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT) }; @@ -11090,6 +12362,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11098,13 +12371,14 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using PipelineStageFlags = Flags; + + using PipelineStageFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { - allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eRayTracingShaderNV) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) | VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) + allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | 
VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eRayTracingShaderKHR) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildKHR) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) | VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) | VkFlags(PipelineStageFlagBits::eCommandPreprocessNV) }; }; @@ -11130,6 +12404,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11152,38 +12427,65 @@ namespace VULKAN_HPP_NAMESPACE if ( value & PipelineStageFlagBits::eAllCommands ) result += "AllCommands | "; if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | "; if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; - if ( value & PipelineStageFlagBits::eCommandProcessNVX ) result += "CommandProcessNVX | "; + if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) result += "RayTracingShaderKHR | "; + if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) result += "AccelerationStructureBuildKHR | "; if ( value & PipelineStageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | "; - if ( value & PipelineStageFlagBits::eRayTracingShaderNV ) result += "RayTracingShaderNV | "; - if ( value & PipelineStageFlagBits::eAccelerationStructureBuildNV ) result += "AccelerationStructureBuildNV | "; if ( value & PipelineStageFlagBits::eTaskShaderNV ) result += "TaskShaderNV | "; if ( value & PipelineStageFlagBits::eMeshShaderNV ) result += "MeshShaderNV | "; if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) result += "FragmentDensityProcessEXT | "; + if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) result += "CommandPreprocessNV | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - using PipelineTessellationStateCreateFlags = Flags; + enum class PipelineTessellationStateCreateFlagBits : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineTessellationStateCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags ) { + return "{}"; } - using PipelineVertexInputStateCreateFlags = Flags; + enum class PipelineVertexInputStateCreateFlagBits : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineVertexInputStateCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags ) { + return "{}"; } - using PipelineViewportStateCreateFlags = Flags; + enum class PipelineViewportStateCreateFlagBits : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineViewportStateCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags ) { + return "{}"; } - enum class PipelineViewportSwizzleStateCreateFlagBitsNV + enum class 
PipelineViewportSwizzleStateCreateFlagBitsNV : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV ) @@ -11191,18 +12493,29 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using PipelineViewportSwizzleStateCreateFlagsNV = Flags; + using PipelineViewportSwizzleStateCreateFlagsNV = Flags; VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV ) { + return "{}"; } - using QueryControlFlags = Flags; + + using PrivateDataSlotCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagsEXT ) + { + + return "{}"; + } + + + using QueryControlFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(QueryControlFlagBits::ePrecise) }; @@ -11230,6 +12543,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11237,11 +12551,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using QueryPipelineStatisticFlags = Flags; + + using QueryPipelineStatisticFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations) }; @@ -11269,6 +12584,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11286,18 +12602,21 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using QueryPoolCreateFlags = Flags; + + using QueryPoolCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags ) { + return "{}"; } - using QueryResultFlags = Flags; + + using QueryResultFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial) }; @@ -11325,6 +12644,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11335,11 +12655,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using QueueFlags = Flags; + + using QueueFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding) | VkFlags(QueueFlagBits::eProtected) }; @@ -11367,6 +12688,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( QueueFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11378,18 
+12700,53 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using RenderPassCreateFlags = Flags; - VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags ) + using RenderPassCreateFlags = Flags; + + template <> struct FlagTraits { - return "{}"; + enum : VkFlags + { + allFlags = VkFlags(RenderPassCreateFlagBits::eTransformQCOM) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderPassCreateFlags( bit0 ) | bit1; } - using ResolveModeFlags = Flags; + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator&( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderPassCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderPassCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( RenderPassCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value ) + { + + if ( !value ) return "{}"; + std::string result; + + if ( value & RenderPassCreateFlagBits::eTransformQCOM ) result += "TransformQCOM | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + + using ResolveModeFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ResolveModeFlagBits::eNone) | VkFlags(ResolveModeFlagBits::eSampleZero) | VkFlags(ResolveModeFlagBits::eAverage) | VkFlags(ResolveModeFlagBits::eMin) | VkFlags(ResolveModeFlagBits::eMax) }; @@ -11419,6 +12776,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11429,11 +12787,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using SampleCountFlags = Flags; + + using SampleCountFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64) }; @@ -11461,6 +12820,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11474,11 +12834,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using SamplerCreateFlags = Flags; + + using SamplerCreateFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(SamplerCreateFlagBits::eSubsampledEXT) | VkFlags(SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT) }; @@ -11506,6 +12867,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11514,18 +12876,21 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using SemaphoreCreateFlags = Flags; + + using SemaphoreCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags ) { + return "{}"; } - using SemaphoreImportFlags = 
Flags; + + using SemaphoreImportFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary) }; @@ -11555,6 +12920,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11562,11 +12928,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using SemaphoreWaitFlags = Flags; + + using SemaphoreWaitFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(SemaphoreWaitFlagBits::eAny) }; @@ -11596,6 +12963,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11603,27 +12971,32 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using ShaderCorePropertiesFlagsAMD = Flags; + + using ShaderCorePropertiesFlagsAMD = Flags; VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD ) { + return "{}"; } - using ShaderModuleCreateFlags = Flags; + + using ShaderModuleCreateFlags = Flags; VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags ) { + return "{}"; } - using ShaderStageFlags = Flags; + + using ShaderStageFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { - allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenNV) | VkFlags(ShaderStageFlagBits::eAnyHitNV) | VkFlags(ShaderStageFlagBits::eClosestHitNV) | VkFlags(ShaderStageFlagBits::eMissNV) | VkFlags(ShaderStageFlagBits::eIntersectionNV) | VkFlags(ShaderStageFlagBits::eCallableNV) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV) + allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenKHR) | VkFlags(ShaderStageFlagBits::eAnyHitKHR) | VkFlags(ShaderStageFlagBits::eClosestHitKHR) | VkFlags(ShaderStageFlagBits::eMissKHR) | VkFlags(ShaderStageFlagBits::eIntersectionKHR) | VkFlags(ShaderStageFlagBits::eCallableKHR) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV) }; }; @@ -11649,6 +13022,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11658,22 +13032,23 @@ namespace VULKAN_HPP_NAMESPACE if ( value & ShaderStageFlagBits::eGeometry ) result += "Geometry | "; if ( value & ShaderStageFlagBits::eFragment ) result += "Fragment | "; if ( value & ShaderStageFlagBits::eCompute ) result += "Compute | "; - if ( value & ShaderStageFlagBits::eRaygenNV ) result += "RaygenNV | "; - if ( value & ShaderStageFlagBits::eAnyHitNV ) result += "AnyHitNV | "; - if ( value & ShaderStageFlagBits::eClosestHitNV ) result += "ClosestHitNV | "; - if ( value & ShaderStageFlagBits::eMissNV 
) result += "MissNV | "; - if ( value & ShaderStageFlagBits::eIntersectionNV ) result += "IntersectionNV | "; - if ( value & ShaderStageFlagBits::eCallableNV ) result += "CallableNV | "; + if ( value & ShaderStageFlagBits::eRaygenKHR ) result += "RaygenKHR | "; + if ( value & ShaderStageFlagBits::eAnyHitKHR ) result += "AnyHitKHR | "; + if ( value & ShaderStageFlagBits::eClosestHitKHR ) result += "ClosestHitKHR | "; + if ( value & ShaderStageFlagBits::eMissKHR ) result += "MissKHR | "; + if ( value & ShaderStageFlagBits::eIntersectionKHR ) result += "IntersectionKHR | "; + if ( value & ShaderStageFlagBits::eCallableKHR ) result += "CallableKHR | "; if ( value & ShaderStageFlagBits::eTaskNV ) result += "TaskNV | "; if ( value & ShaderStageFlagBits::eMeshNV ) result += "MeshNV | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - using SparseImageFormatFlags = Flags; + + using SparseImageFormatFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize) }; @@ -11701,6 +13076,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11710,11 +13086,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using SparseMemoryBindFlags = Flags; + + using SparseMemoryBindFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata) }; @@ -11742,6 +13119,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11749,11 +13127,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using StencilFaceFlags = Flags; + + using StencilFaceFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eFrontAndBack) }; @@ -11781,6 +13160,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11790,7 +13170,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifdef VK_USE_PLATFORM_GGP - enum class StreamDescriptorSurfaceCreateFlagBitsGGP + enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP ) @@ -11798,19 +13178,21 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using StreamDescriptorSurfaceCreateFlagsGGP = Flags; + using StreamDescriptorSurfaceCreateFlagsGGP = Flags; VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP ) { + return "{}"; } #endif /*VK_USE_PLATFORM_GGP*/ - using SubgroupFeatureFlags = Flags; + + using SubgroupFeatureFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) | VkFlags(SubgroupFeatureFlagBits::eVote) | VkFlags(SubgroupFeatureFlagBits::eArithmetic) | VkFlags(SubgroupFeatureFlagBits::eBallot) | VkFlags(SubgroupFeatureFlagBits::eShuffle) | VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) | VkFlags(SubgroupFeatureFlagBits::eClustered) | VkFlags(SubgroupFeatureFlagBits::eQuad) | 
VkFlags(SubgroupFeatureFlagBits::ePartitionedNV) }; @@ -11838,6 +13220,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value ) { + if ( !value ) return "{}"; std::string result; @@ -11853,13 +13236,14 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using SubpassDescriptionFlags = Flags; + + using SubpassDescriptionFlags = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { - allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) + allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) | VkFlags(SubpassDescriptionFlagBits::eFragmentRegionQCOM) | VkFlags(SubpassDescriptionFlagBits::eShaderResolveQCOM) }; }; @@ -11885,19 +13269,23 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value ) { + if ( !value ) return "{}"; std::string result; if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) result += "PerViewAttributesNVX | "; if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) result += "PerViewPositionXOnlyNVX | "; + if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM ) result += "FragmentRegionQCOM | "; + if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM ) result += "ShaderResolveQCOM | "; return "{ " + result.substr(0, result.size() - 3) + " }"; } - using SurfaceCounterFlagsEXT = Flags; + + using SurfaceCounterFlagsEXT = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank) }; @@ -11925,6 +13313,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value ) { + if ( !value ) return "{}"; std::string result; @@ -11932,11 +13321,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using SurfaceTransformFlagsKHR = Flags; + + using SurfaceTransformFlagsKHR = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit) }; @@ -11964,6 +13354,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value ) { + if ( !value ) return "{}"; std::string result; @@ -11979,11 +13370,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - using SwapchainCreateFlagsKHR = Flags; + + using SwapchainCreateFlagsKHR = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) | VkFlags(SwapchainCreateFlagBitsKHR::eProtected) | VkFlags(SwapchainCreateFlagBitsKHR::eMutableFormat) }; @@ -12011,6 +13403,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value ) { + if ( !value ) return "{}"; std::string result; @@ -12020,11 +13413,12 @@ namespace VULKAN_HPP_NAMESPACE return "{ 
" + result.substr(0, result.size() - 3) + " }"; } - using ToolPurposeFlagsEXT = Flags; + + using ToolPurposeFlagsEXT = Flags; template <> struct FlagTraits { - enum + enum : VkFlags { allFlags = VkFlags(ToolPurposeFlagBitsEXT::eValidation) | VkFlags(ToolPurposeFlagBitsEXT::eProfiling) | VkFlags(ToolPurposeFlagBitsEXT::eTracing) | VkFlags(ToolPurposeFlagBitsEXT::eAdditionalFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eModifyingFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eDebugReporting) | VkFlags(ToolPurposeFlagBitsEXT::eDebugMarkers) }; @@ -12052,6 +13446,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagsEXT value ) { + if ( !value ) return "{}"; std::string result; @@ -12065,7 +13460,7 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr(0, result.size() - 3) + " }"; } - enum class ValidationCacheCreateFlagBitsEXT + enum class ValidationCacheCreateFlagBitsEXT : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT ) @@ -12073,15 +13468,16 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using ValidationCacheCreateFlagsEXT = Flags; + using ValidationCacheCreateFlagsEXT = Flags; VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT ) { + return "{}"; } #ifdef VK_USE_PLATFORM_VI_NN - enum class ViSurfaceCreateFlagBitsNN + enum class ViSurfaceCreateFlagBitsNN : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN ) @@ -12089,16 +13485,17 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using ViSurfaceCreateFlagsNN = Flags; + using ViSurfaceCreateFlagsNN = Flags; VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN ) { + return "{}"; } #endif /*VK_USE_PLATFORM_VI_NN*/ #ifdef VK_USE_PLATFORM_WAYLAND_KHR - enum class WaylandSurfaceCreateFlagBitsKHR + enum class WaylandSurfaceCreateFlagBitsKHR : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR ) @@ -12106,16 +13503,17 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using WaylandSurfaceCreateFlagsKHR = Flags; + using WaylandSurfaceCreateFlagsKHR = Flags; VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR ) { + return "{}"; } #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - enum class Win32SurfaceCreateFlagBitsKHR + enum class Win32SurfaceCreateFlagBitsKHR : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR ) @@ -12123,16 +13521,17 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using Win32SurfaceCreateFlagsKHR = Flags; + using Win32SurfaceCreateFlagsKHR = Flags; VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR ) { + return "{}"; } #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_XCB_KHR - enum class XcbSurfaceCreateFlagBitsKHR + enum class XcbSurfaceCreateFlagBitsKHR : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR ) @@ -12140,16 +13539,17 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using XcbSurfaceCreateFlagsKHR = Flags; + using XcbSurfaceCreateFlagsKHR = Flags; VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR ) { + return "{}"; } #endif /*VK_USE_PLATFORM_XCB_KHR*/ #ifdef VK_USE_PLATFORM_XLIB_KHR - enum class XlibSurfaceCreateFlagBitsKHR + enum class XlibSurfaceCreateFlagBitsKHR : VkFlags {}; VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR ) @@ -12157,10 +13557,11 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - using XlibSurfaceCreateFlagsKHR = Flags; + 
using XlibSurfaceCreateFlagsKHR = Flags; VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR ) { + return "{}"; } #endif /*VK_USE_PLATFORM_XLIB_KHR*/ @@ -12448,6 +13849,15 @@ namespace VULKAN_HPP_NAMESPACE : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} }; + class IncompatibleVersionKHRError : public SystemError + { + public: + IncompatibleVersionKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) {} + IncompatibleVersionKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) {} + }; + class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError { public: @@ -12475,6 +13885,7 @@ namespace VULKAN_HPP_NAMESPACE : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} }; + [[noreturn]] static void throwResultException( Result result, char const * message ) { switch ( result ) @@ -12502,6 +13913,7 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message ); case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message ); case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message ); + case Result::eErrorIncompatibleVersionKHR: throw IncompatibleVersionKHRError( message ); case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message ); case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message ); case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message ); @@ -12537,8 +13949,92 @@ namespace VULKAN_HPP_NAMESPACE T value; operator std::tuple() VULKAN_HPP_NOEXCEPT { return std::tuple(result, value); } + +#if !defined(VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST) + VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.") + operator T const& () const & VULKAN_HPP_NOEXCEPT + { + return value; + } + + VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.") + operator T& () & VULKAN_HPP_NOEXCEPT + { + return value; + } + + VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.") + operator T const&& () const && VULKAN_HPP_NOEXCEPT + { + return std::move( value ); + } + + VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.") + operator T&& () && VULKAN_HPP_NOEXCEPT + { + return std::move( value ); + } +#endif }; +#if !defined(VULKAN_HPP_NO_SMART_HANDLE) + template + struct ResultValue> + { +#ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue(Result r, UniqueHandle && v) VULKAN_HPP_NOEXCEPT +#else + ResultValue(Result r, UniqueHandle && v) +#endif + : result(r) + , value(std::move(v)) + {} + + std::tuple> asTuple() + { + return std::make_tuple( result, std::move( value ) ); + } + +# if !defined(VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST) + VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.") + operator UniqueHandle& () & VULKAN_HPP_NOEXCEPT + { + return value; + } + + VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. 
Explicitly access the value as member of ResultValue.") + operator UniqueHandle() VULKAN_HPP_NOEXCEPT + { + return std::move(value); + } +# endif + + Result result; + UniqueHandle value; + }; + + template + struct ResultValue>> + { +# ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, std::vector> && v ) VULKAN_HPP_NOEXCEPT +# else + ResultValue( Result r, std::vector> && v ) +# endif + : result( r ) + , value( std::move( v ) ) + {} + + Result result; + std::vector> value; + + operator std::tuple> &>() VULKAN_HPP_NOEXCEPT + { + return std::tuple> &>( result, value ); + } + }; +#endif + template struct ResultValueType { @@ -12563,7 +14059,7 @@ namespace VULKAN_HPP_NAMESPACE { #ifdef VULKAN_HPP_NO_EXCEPTIONS ignore(message); - VULKAN_HPP_ASSERT( result == Result::eSuccess ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); return result; #else if ( result != Result::eSuccess ) @@ -12578,7 +14074,7 @@ namespace VULKAN_HPP_NAMESPACE { #ifdef VULKAN_HPP_NO_EXCEPTIONS ignore(message); - VULKAN_HPP_ASSERT( result == Result::eSuccess ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); return ResultValue( result, std::move( data ) ); #else if ( result != Result::eSuccess ) @@ -12593,7 +14089,7 @@ namespace VULKAN_HPP_NAMESPACE { #ifdef VULKAN_HPP_NO_EXCEPTIONS ignore(message); - VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); #else if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) { @@ -12608,7 +14104,7 @@ namespace VULKAN_HPP_NAMESPACE { #ifdef VULKAN_HPP_NO_EXCEPTIONS ignore(message); - VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); #else if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) { @@ -12624,7 +14120,7 @@ namespace VULKAN_HPP_NAMESPACE { #ifdef VULKAN_HPP_NO_EXCEPTIONS ignore(message); - VULKAN_HPP_ASSERT( result == Result::eSuccess ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); return ResultValue>( result, UniqueHandle(data, deleter) ); #else if ( result != Result::eSuccess ) @@ -12634,1381 +14130,1310 @@ namespace VULKAN_HPP_NAMESPACE return UniqueHandle(data, deleter); #endif } + + template + VULKAN_HPP_INLINE ResultValue> + createResultValue( Result result, + T & data, + char const * message, + std::initializer_list successCodes, + typename UniqueHandleTraits::deleter const & deleter ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +# else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + throwResultException( result, message ); + } +# endif + return ResultValue>( result, UniqueHandle( data, deleter ) ); + } + + template + VULKAN_HPP_INLINE typename ResultValueType>>::type + createResultValue( Result result, std::vector> && data, char const * message ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); + return ResultValue>>( result, std::move( data ) ); +# else + if ( result != Result::eSuccess ) + { + throwResultException( result, message ); + } + return std::move( 
data ); +# endif + } + + template + VULKAN_HPP_INLINE ResultValue>> + createResultValue( Result result, + std::vector> && data, + char const * message, + std::initializer_list successCodes ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +# else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + throwResultException( result, message ); + } +# endif + return ResultValue>>( result, std::move( data ) ); + } #endif - struct AccelerationStructureCreateInfoNV; - struct AccelerationStructureInfoNV; - struct AccelerationStructureMemoryRequirementsInfoNV; - struct AcquireNextImageInfoKHR; - struct AcquireProfilingLockInfoKHR; - struct AllocationCallbacks; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidHardwareBufferFormatPropertiesANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidHardwareBufferPropertiesANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidHardwareBufferUsageANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidSurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct ApplicationInfo; - struct AttachmentDescription; - struct AttachmentDescription2; - using AttachmentDescription2KHR = AttachmentDescription2; - struct AttachmentDescriptionStencilLayout; - using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; - struct AttachmentReference; - struct AttachmentReference2; - using AttachmentReference2KHR = AttachmentReference2; - struct AttachmentReferenceStencilLayout; - using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout; - struct AttachmentSampleLocationsEXT; - struct BaseInStructure; - struct BaseOutStructure; - struct BindAccelerationStructureMemoryInfoNV; - struct BindBufferMemoryDeviceGroupInfo; - using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; - struct BindBufferMemoryInfo; - using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; - struct BindImageMemoryDeviceGroupInfo; - using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; - struct BindImageMemoryInfo; - using BindImageMemoryInfoKHR = BindImageMemoryInfo; - struct BindImageMemorySwapchainInfoKHR; - struct BindImagePlaneMemoryInfo; - using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; - struct BindSparseInfo; - struct BufferCopy; - struct BufferCreateInfo; - struct BufferDeviceAddressCreateInfoEXT; - struct BufferDeviceAddressInfo; - using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; - using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; - struct BufferImageCopy; - struct BufferMemoryBarrier; - struct BufferMemoryRequirementsInfo2; - using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; - struct BufferOpaqueCaptureAddressCreateInfo; - using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; - struct BufferViewCreateInfo; - struct CalibratedTimestampInfoEXT; - struct CheckpointDataNV; - struct ClearAttachment; - union ClearColorValue; - struct ClearDepthStencilValue; - struct ClearRect; - union ClearValue; - struct CmdProcessCommandsInfoNVX; - struct CmdReserveSpaceForCommandsInfoNVX; - struct CoarseSampleLocationNV; - struct CoarseSampleOrderCustomNV; - struct CommandBufferAllocateInfo; - struct CommandBufferBeginInfo; - 
struct CommandBufferInheritanceConditionalRenderingInfoEXT; - struct CommandBufferInheritanceInfo; - struct CommandPoolCreateInfo; - struct ComponentMapping; - struct ComputePipelineCreateInfo; - struct ConditionalRenderingBeginInfoEXT; - struct ConformanceVersion; - using ConformanceVersionKHR = ConformanceVersion; - struct CooperativeMatrixPropertiesNV; - struct CopyDescriptorSet; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct D3D12FenceSubmitInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct DebugMarkerMarkerInfoEXT; - struct DebugMarkerObjectNameInfoEXT; - struct DebugMarkerObjectTagInfoEXT; - struct DebugReportCallbackCreateInfoEXT; - struct DebugUtilsLabelEXT; - struct DebugUtilsMessengerCallbackDataEXT; - struct DebugUtilsMessengerCreateInfoEXT; - struct DebugUtilsObjectNameInfoEXT; - struct DebugUtilsObjectTagInfoEXT; - struct DedicatedAllocationBufferCreateInfoNV; - struct DedicatedAllocationImageCreateInfoNV; - struct DedicatedAllocationMemoryAllocateInfoNV; - struct DescriptorBufferInfo; - struct DescriptorImageInfo; - struct DescriptorPoolCreateInfo; - struct DescriptorPoolInlineUniformBlockCreateInfoEXT; - struct DescriptorPoolSize; - struct DescriptorSetAllocateInfo; - struct DescriptorSetLayoutBinding; - struct DescriptorSetLayoutBindingFlagsCreateInfo; - using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; - struct DescriptorSetLayoutCreateInfo; - struct DescriptorSetLayoutSupport; - using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; - struct DescriptorSetVariableDescriptorCountAllocateInfo; - using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; - struct DescriptorSetVariableDescriptorCountLayoutSupport; - using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; - struct DescriptorUpdateTemplateCreateInfo; - using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; - struct DescriptorUpdateTemplateEntry; - using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; - struct DeviceCreateInfo; - struct DeviceEventInfoEXT; - struct DeviceGeneratedCommandsFeaturesNVX; - struct DeviceGeneratedCommandsLimitsNVX; - struct DeviceGroupBindSparseInfo; - using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; - struct DeviceGroupCommandBufferBeginInfo; - using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; - struct DeviceGroupDeviceCreateInfo; - using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; - struct DeviceGroupPresentCapabilitiesKHR; - struct DeviceGroupPresentInfoKHR; - struct DeviceGroupRenderPassBeginInfo; - using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; - struct DeviceGroupSubmitInfo; - using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; - struct DeviceGroupSwapchainCreateInfoKHR; - struct DeviceMemoryOpaqueCaptureAddressInfo; - using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; - struct DeviceMemoryOverallocationCreateInfoAMD; - struct DeviceQueueCreateInfo; - struct DeviceQueueGlobalPriorityCreateInfoEXT; - struct DeviceQueueInfo2; - struct DispatchIndirectCommand; - struct DisplayEventInfoEXT; - struct DisplayModeCreateInfoKHR; - struct DisplayModeParametersKHR; - struct DisplayModeProperties2KHR; - struct DisplayModePropertiesKHR; - struct DisplayNativeHdrSurfaceCapabilitiesAMD; - struct DisplayPlaneCapabilities2KHR; - struct 
DisplayPlaneCapabilitiesKHR; - struct DisplayPlaneInfo2KHR; - struct DisplayPlaneProperties2KHR; - struct DisplayPlanePropertiesKHR; - struct DisplayPowerInfoEXT; - struct DisplayPresentInfoKHR; - struct DisplayProperties2KHR; - struct DisplayPropertiesKHR; - struct DisplaySurfaceCreateInfoKHR; - struct DrawIndexedIndirectCommand; - struct DrawIndirectCommand; - struct DrawMeshTasksIndirectCommandNV; - struct DrmFormatModifierPropertiesEXT; - struct DrmFormatModifierPropertiesListEXT; - struct EventCreateInfo; - struct ExportFenceCreateInfo; - using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportFenceWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ExportMemoryAllocateInfo; - using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; - struct ExportMemoryAllocateInfoNV; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportMemoryWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportMemoryWin32HandleInfoNV; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ExportSemaphoreCreateInfo; - using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportSemaphoreWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ExtensionProperties; - struct Extent2D; - struct Extent3D; - struct ExternalBufferProperties; - using ExternalBufferPropertiesKHR = ExternalBufferProperties; - struct ExternalFenceProperties; - using ExternalFencePropertiesKHR = ExternalFenceProperties; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct ExternalFormatANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct ExternalImageFormatProperties; - using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; - struct ExternalImageFormatPropertiesNV; - struct ExternalMemoryBufferCreateInfo; - using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; - struct ExternalMemoryImageCreateInfo; - using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; - struct ExternalMemoryImageCreateInfoNV; - struct ExternalMemoryProperties; - using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; - struct ExternalSemaphoreProperties; - using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; - struct FenceCreateInfo; - struct FenceGetFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct FenceGetWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct FilterCubicImageViewImageFormatPropertiesEXT; - struct FormatProperties; - struct FormatProperties2; - using FormatProperties2KHR = FormatProperties2; - struct FramebufferAttachmentImageInfo; - using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; - struct FramebufferAttachmentsCreateInfo; - using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; - struct FramebufferCreateInfo; - struct FramebufferMixedSamplesCombinationNV; - struct GeometryAABBNV; - struct GeometryDataNV; - struct GeometryNV; - struct GeometryTrianglesNV; - struct GraphicsPipelineCreateInfo; - struct HdrMetadataEXT; - struct HeadlessSurfaceCreateInfoEXT; -#ifdef VK_USE_PLATFORM_IOS_MVK - struct IOSSurfaceCreateInfoMVK; -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - struct ImageBlit; - struct ImageCopy; - struct ImageCreateInfo; - struct ImageDrmFormatModifierExplicitCreateInfoEXT; - struct ImageDrmFormatModifierListCreateInfoEXT; - struct ImageDrmFormatModifierPropertiesEXT; - struct ImageFormatListCreateInfo; - using ImageFormatListCreateInfoKHR = 
ImageFormatListCreateInfo; - struct ImageFormatProperties; - struct ImageFormatProperties2; - using ImageFormatProperties2KHR = ImageFormatProperties2; - struct ImageMemoryBarrier; - struct ImageMemoryRequirementsInfo2; - using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; -#ifdef VK_USE_PLATFORM_FUCHSIA - struct ImagePipeSurfaceCreateInfoFUCHSIA; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct ImagePlaneMemoryRequirementsInfo; - using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; - struct ImageResolve; - struct ImageSparseMemoryRequirementsInfo2; - using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; - struct ImageStencilUsageCreateInfo; - using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; - struct ImageSubresource; - struct ImageSubresourceLayers; - struct ImageSubresourceRange; - struct ImageSwapchainCreateInfoKHR; - struct ImageViewASTCDecodeModeEXT; - struct ImageViewCreateInfo; - struct ImageViewHandleInfoNVX; - struct ImageViewUsageCreateInfo; - using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct ImportAndroidHardwareBufferInfoANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct ImportFenceFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportFenceWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ImportMemoryFdInfoKHR; - struct ImportMemoryHostPointerInfoEXT; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportMemoryWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportMemoryWin32HandleInfoNV; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ImportSemaphoreFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportSemaphoreWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct IndirectCommandsLayoutCreateInfoNVX; - struct IndirectCommandsLayoutTokenNVX; - struct IndirectCommandsTokenNVX; - struct InitializePerformanceApiInfoINTEL; - struct InputAttachmentAspectReference; - using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; - struct InstanceCreateInfo; - struct LayerProperties; -#ifdef VK_USE_PLATFORM_MACOS_MVK - struct MacOSSurfaceCreateInfoMVK; -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - struct MappedMemoryRange; - struct MemoryAllocateFlagsInfo; - using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; - struct MemoryAllocateInfo; - struct MemoryBarrier; - struct MemoryDedicatedAllocateInfo; - using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; - struct MemoryDedicatedRequirements; - using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; - struct MemoryFdPropertiesKHR; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct MemoryGetAndroidHardwareBufferInfoANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct MemoryGetFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct MemoryGetWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct MemoryHeap; - struct MemoryHostPointerPropertiesEXT; - struct MemoryOpaqueCaptureAddressAllocateInfo; - using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; - struct MemoryPriorityAllocateInfoEXT; - struct MemoryRequirements; - struct MemoryRequirements2; - using MemoryRequirements2KHR = MemoryRequirements2; - struct MemoryType; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct MemoryWin32HandlePropertiesKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_METAL_EXT - struct MetalSurfaceCreateInfoEXT; -#endif 
/*VK_USE_PLATFORM_METAL_EXT*/ - struct MultisamplePropertiesEXT; - struct ObjectTableCreateInfoNVX; - struct ObjectTableDescriptorSetEntryNVX; - struct ObjectTableEntryNVX; - struct ObjectTableIndexBufferEntryNVX; - struct ObjectTablePipelineEntryNVX; - struct ObjectTablePushConstantEntryNVX; - struct ObjectTableVertexBufferEntryNVX; - struct Offset2D; - struct Offset3D; - struct PastPresentationTimingGOOGLE; - struct PerformanceConfigurationAcquireInfoINTEL; - struct PerformanceCounterDescriptionKHR; - struct PerformanceCounterKHR; - union PerformanceCounterResultKHR; - struct PerformanceMarkerInfoINTEL; - struct PerformanceOverrideInfoINTEL; - struct PerformanceQuerySubmitInfoKHR; - struct PerformanceStreamMarkerInfoINTEL; - union PerformanceValueDataINTEL; - struct PerformanceValueINTEL; - struct PhysicalDevice16BitStorageFeatures; - using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; - struct PhysicalDevice8BitStorageFeatures; - using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; - struct PhysicalDeviceASTCDecodeFeaturesEXT; - struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT; - struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT; - struct PhysicalDeviceBufferDeviceAddressFeatures; - using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; - struct PhysicalDeviceBufferDeviceAddressFeaturesEXT; - using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; - struct PhysicalDeviceCoherentMemoryFeaturesAMD; - struct PhysicalDeviceComputeShaderDerivativesFeaturesNV; - struct PhysicalDeviceConditionalRenderingFeaturesEXT; - struct PhysicalDeviceConservativeRasterizationPropertiesEXT; - struct PhysicalDeviceCooperativeMatrixFeaturesNV; - struct PhysicalDeviceCooperativeMatrixPropertiesNV; - struct PhysicalDeviceCornerSampledImageFeaturesNV; - struct PhysicalDeviceCoverageReductionModeFeaturesNV; - struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; - struct PhysicalDeviceDepthClipEnableFeaturesEXT; - struct PhysicalDeviceDepthStencilResolveProperties; - using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; - struct PhysicalDeviceDescriptorIndexingFeatures; - using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; - struct PhysicalDeviceDescriptorIndexingProperties; - using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; - struct PhysicalDeviceDiscardRectanglePropertiesEXT; - struct PhysicalDeviceDriverProperties; - using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; - struct PhysicalDeviceExclusiveScissorFeaturesNV; - struct PhysicalDeviceExternalBufferInfo; - using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; - struct PhysicalDeviceExternalFenceInfo; - using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; - struct PhysicalDeviceExternalImageFormatInfo; - using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; - struct PhysicalDeviceExternalMemoryHostPropertiesEXT; - struct PhysicalDeviceExternalSemaphoreInfo; - using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; - struct PhysicalDeviceFeatures; - struct PhysicalDeviceFeatures2; - using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; - struct PhysicalDeviceFloatControlsProperties; - using PhysicalDeviceFloatControlsPropertiesKHR 
= PhysicalDeviceFloatControlsProperties; - struct PhysicalDeviceFragmentDensityMapFeaturesEXT; - struct PhysicalDeviceFragmentDensityMapPropertiesEXT; - struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV; - struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT; - struct PhysicalDeviceGroupProperties; - using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; - struct PhysicalDeviceHostQueryResetFeatures; - using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; - struct PhysicalDeviceIDProperties; - using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; - struct PhysicalDeviceImageDrmFormatModifierInfoEXT; - struct PhysicalDeviceImageFormatInfo2; - using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; - struct PhysicalDeviceImageViewImageFormatInfoEXT; - struct PhysicalDeviceImagelessFramebufferFeatures; - using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; - struct PhysicalDeviceIndexTypeUint8FeaturesEXT; - struct PhysicalDeviceInlineUniformBlockFeaturesEXT; - struct PhysicalDeviceInlineUniformBlockPropertiesEXT; - struct PhysicalDeviceLimits; - struct PhysicalDeviceLineRasterizationFeaturesEXT; - struct PhysicalDeviceLineRasterizationPropertiesEXT; - struct PhysicalDeviceMaintenance3Properties; - using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; - struct PhysicalDeviceMemoryBudgetPropertiesEXT; - struct PhysicalDeviceMemoryPriorityFeaturesEXT; - struct PhysicalDeviceMemoryProperties; - struct PhysicalDeviceMemoryProperties2; - using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; - struct PhysicalDeviceMeshShaderFeaturesNV; - struct PhysicalDeviceMeshShaderPropertiesNV; - struct PhysicalDeviceMultiviewFeatures; - using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; - struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; - struct PhysicalDeviceMultiviewProperties; - using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; - struct PhysicalDevicePCIBusInfoPropertiesEXT; - struct PhysicalDevicePerformanceQueryFeaturesKHR; - struct PhysicalDevicePerformanceQueryPropertiesKHR; - struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; - struct PhysicalDevicePointClippingProperties; - using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; - struct PhysicalDeviceProperties; - struct PhysicalDeviceProperties2; - using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; - struct PhysicalDeviceProtectedMemoryFeatures; - struct PhysicalDeviceProtectedMemoryProperties; - struct PhysicalDevicePushDescriptorPropertiesKHR; - struct PhysicalDeviceRayTracingPropertiesNV; - struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV; - struct PhysicalDeviceSampleLocationsPropertiesEXT; - struct PhysicalDeviceSamplerFilterMinmaxProperties; - using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties; - struct PhysicalDeviceSamplerYcbcrConversionFeatures; - using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; - struct PhysicalDeviceScalarBlockLayoutFeatures; - using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; - struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures; - using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; - struct 
PhysicalDeviceShaderAtomicInt64Features; - using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features; - struct PhysicalDeviceShaderClockFeaturesKHR; - struct PhysicalDeviceShaderCoreProperties2AMD; - struct PhysicalDeviceShaderCorePropertiesAMD; - struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; - struct PhysicalDeviceShaderDrawParametersFeatures; - using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; - struct PhysicalDeviceShaderFloat16Int8Features; - using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; - using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; - struct PhysicalDeviceShaderImageFootprintFeaturesNV; - struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; - struct PhysicalDeviceShaderSMBuiltinsFeaturesNV; - struct PhysicalDeviceShaderSMBuiltinsPropertiesNV; - struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures; - using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; - struct PhysicalDeviceShadingRateImageFeaturesNV; - struct PhysicalDeviceShadingRateImagePropertiesNV; - struct PhysicalDeviceSparseImageFormatInfo2; - using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; - struct PhysicalDeviceSparseProperties; - struct PhysicalDeviceSubgroupProperties; - struct PhysicalDeviceSubgroupSizeControlFeaturesEXT; - struct PhysicalDeviceSubgroupSizeControlPropertiesEXT; - struct PhysicalDeviceSurfaceInfo2KHR; - struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT; - struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT; - struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; - struct PhysicalDeviceTimelineSemaphoreFeatures; - using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures; - struct PhysicalDeviceTimelineSemaphoreProperties; - using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; - struct PhysicalDeviceToolPropertiesEXT; - struct PhysicalDeviceTransformFeedbackFeaturesEXT; - struct PhysicalDeviceTransformFeedbackPropertiesEXT; - struct PhysicalDeviceUniformBufferStandardLayoutFeatures; - using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; - struct PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; - struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT; - struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT; - struct PhysicalDeviceVulkan11Features; - struct PhysicalDeviceVulkan11Properties; - struct PhysicalDeviceVulkan12Features; - struct PhysicalDeviceVulkan12Properties; - struct PhysicalDeviceVulkanMemoryModelFeatures; - using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; - struct PhysicalDeviceYcbcrImageArraysFeaturesEXT; - struct PipelineCacheCreateInfo; - struct PipelineColorBlendAdvancedStateCreateInfoEXT; - struct PipelineColorBlendAttachmentState; - struct PipelineColorBlendStateCreateInfo; - struct PipelineCompilerControlCreateInfoAMD; - struct PipelineCoverageModulationStateCreateInfoNV; - struct PipelineCoverageReductionStateCreateInfoNV; - struct 
PipelineCoverageToColorStateCreateInfoNV; - struct PipelineCreationFeedbackCreateInfoEXT; - struct PipelineCreationFeedbackEXT; - struct PipelineDepthStencilStateCreateInfo; - struct PipelineDiscardRectangleStateCreateInfoEXT; - struct PipelineDynamicStateCreateInfo; - struct PipelineExecutableInfoKHR; - struct PipelineExecutableInternalRepresentationKHR; - struct PipelineExecutablePropertiesKHR; - struct PipelineExecutableStatisticKHR; - union PipelineExecutableStatisticValueKHR; - struct PipelineInfoKHR; - struct PipelineInputAssemblyStateCreateInfo; - struct PipelineLayoutCreateInfo; - struct PipelineMultisampleStateCreateInfo; - struct PipelineRasterizationConservativeStateCreateInfoEXT; - struct PipelineRasterizationDepthClipStateCreateInfoEXT; - struct PipelineRasterizationLineStateCreateInfoEXT; - struct PipelineRasterizationStateCreateInfo; - struct PipelineRasterizationStateRasterizationOrderAMD; - struct PipelineRasterizationStateStreamCreateInfoEXT; - struct PipelineRepresentativeFragmentTestStateCreateInfoNV; - struct PipelineSampleLocationsStateCreateInfoEXT; - struct PipelineShaderStageCreateInfo; - struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; - struct PipelineTessellationDomainOriginStateCreateInfo; - using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; - struct PipelineTessellationStateCreateInfo; - struct PipelineVertexInputDivisorStateCreateInfoEXT; - struct PipelineVertexInputStateCreateInfo; - struct PipelineViewportCoarseSampleOrderStateCreateInfoNV; - struct PipelineViewportExclusiveScissorStateCreateInfoNV; - struct PipelineViewportShadingRateImageStateCreateInfoNV; - struct PipelineViewportStateCreateInfo; - struct PipelineViewportSwizzleStateCreateInfoNV; - struct PipelineViewportWScalingStateCreateInfoNV; -#ifdef VK_USE_PLATFORM_GGP - struct PresentFrameTokenGGP; -#endif /*VK_USE_PLATFORM_GGP*/ - struct PresentInfoKHR; - struct PresentRegionKHR; - struct PresentRegionsKHR; - struct PresentTimeGOOGLE; - struct PresentTimesInfoGOOGLE; - struct ProtectedSubmitInfo; - struct PushConstantRange; - struct QueryPoolCreateInfo; - struct QueryPoolCreateInfoINTEL; - struct QueryPoolPerformanceCreateInfoKHR; - struct QueueFamilyCheckpointPropertiesNV; - struct QueueFamilyProperties; - struct QueueFamilyProperties2; - using QueueFamilyProperties2KHR = QueueFamilyProperties2; - struct RayTracingPipelineCreateInfoNV; - struct RayTracingShaderGroupCreateInfoNV; - struct Rect2D; - struct RectLayerKHR; - struct RefreshCycleDurationGOOGLE; - struct RenderPassAttachmentBeginInfo; - using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo; - struct RenderPassBeginInfo; - struct RenderPassCreateInfo; - struct RenderPassCreateInfo2; - using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; - struct RenderPassFragmentDensityMapCreateInfoEXT; - struct RenderPassInputAttachmentAspectCreateInfo; - using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; - struct RenderPassMultiviewCreateInfo; - using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; - struct RenderPassSampleLocationsBeginInfoEXT; - struct SampleLocationEXT; - struct SampleLocationsInfoEXT; - struct SamplerCreateInfo; - struct SamplerReductionModeCreateInfo; - using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo; - struct SamplerYcbcrConversionCreateInfo; - using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; - struct 
SamplerYcbcrConversionImageFormatProperties; - using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; - struct SamplerYcbcrConversionInfo; - using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; - struct SemaphoreCreateInfo; - struct SemaphoreGetFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SemaphoreGetWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct SemaphoreSignalInfo; - using SemaphoreSignalInfoKHR = SemaphoreSignalInfo; - struct SemaphoreTypeCreateInfo; - using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; - struct SemaphoreWaitInfo; - using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; - struct ShaderModuleCreateInfo; - struct ShaderModuleValidationCacheCreateInfoEXT; - struct ShaderResourceUsageAMD; - struct ShaderStatisticsInfoAMD; - struct ShadingRatePaletteNV; - struct SharedPresentSurfaceCapabilitiesKHR; - struct SparseBufferMemoryBindInfo; - struct SparseImageFormatProperties; - struct SparseImageFormatProperties2; - using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; - struct SparseImageMemoryBind; - struct SparseImageMemoryBindInfo; - struct SparseImageMemoryRequirements; - struct SparseImageMemoryRequirements2; - using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; - struct SparseImageOpaqueMemoryBindInfo; - struct SparseMemoryBind; - struct SpecializationInfo; - struct SpecializationMapEntry; - struct StencilOpState; -#ifdef VK_USE_PLATFORM_GGP - struct StreamDescriptorSurfaceCreateInfoGGP; -#endif /*VK_USE_PLATFORM_GGP*/ - struct SubmitInfo; - struct SubpassBeginInfo; - using SubpassBeginInfoKHR = SubpassBeginInfo; - struct SubpassDependency; - struct SubpassDependency2; - using SubpassDependency2KHR = SubpassDependency2; - struct SubpassDescription; - struct SubpassDescription2; - using SubpassDescription2KHR = SubpassDescription2; - struct SubpassDescriptionDepthStencilResolve; - using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; - struct SubpassEndInfo; - using SubpassEndInfoKHR = SubpassEndInfo; - struct SubpassSampleLocationsEXT; - struct SubresourceLayout; - struct SurfaceCapabilities2EXT; - struct SurfaceCapabilities2KHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SurfaceCapabilitiesFullScreenExclusiveEXT; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct SurfaceCapabilitiesKHR; - struct SurfaceFormat2KHR; - struct SurfaceFormatKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SurfaceFullScreenExclusiveInfoEXT; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SurfaceFullScreenExclusiveWin32InfoEXT; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct SurfaceProtectedCapabilitiesKHR; - struct SwapchainCounterCreateInfoEXT; - struct SwapchainCreateInfoKHR; - struct SwapchainDisplayNativeHdrCreateInfoAMD; - struct TextureLODGatherFormatPropertiesAMD; - struct TimelineSemaphoreSubmitInfo; - using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; - struct ValidationCacheCreateInfoEXT; - struct ValidationFeaturesEXT; - struct ValidationFlagsEXT; - struct VertexInputAttributeDescription; - struct VertexInputBindingDescription; - struct VertexInputBindingDivisorDescriptionEXT; -#ifdef VK_USE_PLATFORM_VI_NN - struct ViSurfaceCreateInfoNN; -#endif /*VK_USE_PLATFORM_VI_NN*/ - struct Viewport; - struct ViewportSwizzleNV; - struct ViewportWScalingNV; -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - struct WaylandSurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef 
VK_USE_PLATFORM_WIN32_KHR - struct Win32KeyedMutexAcquireReleaseInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct Win32KeyedMutexAcquireReleaseInfoNV; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct Win32SurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct WriteDescriptorSet; - struct WriteDescriptorSetAccelerationStructureNV; - struct WriteDescriptorSetInlineUniformBlockEXT; - struct XYColorEXT; -#ifdef VK_USE_PLATFORM_XCB_KHR - struct XcbSurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - struct XlibSurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + struct AabbPositionsKHR + { - class SurfaceKHR + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AabbPositionsKHR(float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {}) VULKAN_HPP_NOEXCEPT + : minX( minX_ ), minY( minY_ ), minZ( minZ_ ), maxX( maxX_ ), maxY( maxY_ ), maxZ( maxZ_ ) + {} + + VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AabbPositionsKHR ) ); + return *this; + } + + AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT + { + minX = minX_; + return *this; + } + + AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT + { + minY = minY_; + return *this; + } + + AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT + { + minZ = minZ_; + return *this; + } + + AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT + { + maxX = maxX_; + return *this; + } + + AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT + { + maxY = maxY_; + return *this; + } + + AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT + { + maxZ = maxZ_; + return *this; + } + + + operator VkAabbPositionsKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AabbPositionsKHR const& ) const = default; +#else + bool operator==( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( minX == rhs.minX ) + && ( minY == rhs.minY ) + && ( minZ == rhs.minZ ) + && ( maxX == rhs.maxX ) + && ( maxY == rhs.maxY ) + && ( maxZ == rhs.maxZ ); + } + + bool operator!=( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + float minX = {}; + float minY = {}; + float minZ = {}; + float maxX = {}; + float maxY = {}; + float maxZ = {}; + + }; + static_assert( sizeof( AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
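// A minimal usage sketch (illustrative values only): AabbPositionsKHR describes one
// axis-aligned bounding box as min/max corner coordinates, and the chained setters
// above can be used to fill it in:
//
//   VULKAN_HPP_NAMESPACE::AabbPositionsKHR aabb( -1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f );
//   aabb.setMinX( -1.0f ).setMinY( -1.0f ).setMinZ( -1.0f )
//       .setMaxX(  1.0f ).setMaxY(  1.0f ).setMaxZ(  1.0f );   // equivalent to the constructor call above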
); + using AabbPositionsNV = AabbPositionsKHR; + + class AccelerationStructureKHR { public: - using CType = VkSurfaceKHR; + using CType = VkAccelerationStructureKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSurfaceKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; public: - VULKAN_HPP_CONSTEXPR SurfaceKHR() VULKAN_HPP_NOEXCEPT - : m_surfaceKHR(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR(VK_NULL_HANDLE) {} - VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_surfaceKHR(VK_NULL_HANDLE) + VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR(VK_NULL_HANDLE) {} - VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT - : m_surfaceKHR( surfaceKHR ) + VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR( accelerationStructureKHR ) {} #if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) VULKAN_HPP_NOEXCEPT + AccelerationStructureKHR & operator=(VkAccelerationStructureKHR accelerationStructureKHR) VULKAN_HPP_NOEXCEPT { - m_surfaceKHR = surfaceKHR; + m_accelerationStructureKHR = accelerationStructureKHR; return *this; } #endif - SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - m_surfaceKHR = VK_NULL_HANDLE; + m_accelerationStructureKHR = VK_NULL_HANDLE; return *this; } - bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureKHR const& ) const = default; +#else + bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_surfaceKHR == rhs.m_surfaceKHR; + return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR; } - bool operator!=(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_surfaceKHR != rhs.m_surfaceKHR; + return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR; } - bool operator<(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator<(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_surfaceKHR < rhs.m_surfaceKHR; + return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR; } +#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT { - return m_surfaceKHR; + return m_accelerationStructureKHR; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_surfaceKHR != VK_NULL_HANDLE; + return m_accelerationStructureKHR != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_surfaceKHR == VK_NULL_HANDLE; + return m_accelerationStructureKHR == VK_NULL_HANDLE; } private: - VkSurfaceKHR m_surfaceKHR; + VkAccelerationStructureKHR m_accelerationStructureKHR; }; - static_assert( sizeof( SurfaceKHR ) == sizeof( 
VkSurfaceKHR ), "handle and wrapper have different size!" ); + static_assert( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), "handle and wrapper have different size!" ); template <> - struct cpp_type + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type { - using type = SurfaceKHR; + using type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; }; - class DebugReportCallbackEXT + template <> + struct CppType { - public: - using CType = VkDebugReportCallbackEXT; + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugReportCallbackEXT; - public: - VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT - : m_debugReportCallbackEXT(VK_NULL_HANDLE) - {} + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; - VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_debugReportCallbackEXT(VK_NULL_HANDLE) - {} - VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT - : m_debugReportCallbackEXT( debugReportCallbackEXT ) - {} + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + using AccelerationStructureNV = AccelerationStructureKHR; -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) VULKAN_HPP_NOEXCEPT +#ifdef VK_ENABLE_BETA_EXTENSIONS + union DeviceOrHostAddressConstKHR + { + DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const& rhs ) VULKAN_HPP_NOEXCEPT { - m_debugReportCallbackEXT = debugReportCallbackEXT; + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) ); + } + + DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) + : deviceAddress( deviceAddress_ ) + {} + + DeviceOrHostAddressConstKHR( const void* hostAddress_ ) + : hostAddress( hostAddress_ ) + {} + + DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; return *this; } + + DeviceOrHostAddressConstKHR & setHostAddress( const void* hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } + + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) ); + return *this; + } + + operator VkDeviceOrHostAddressConstKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceOrHostAddressConstKHR &() + { + return *reinterpret_cast(this); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + const void* hostAddress; +#else + VkDeviceAddress deviceAddress; + const void* hostAddress; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureGeometryTrianglesDataKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; + +#if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureGeometryTrianglesDataKHR(VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {}) VULKAN_HPP_NOEXCEPT + : vertexFormat( vertexFormat_ ), vertexData( vertexData_ ), vertexStride( vertexStride_ ), indexType( indexType_ ), indexData( indexData_ ), transformData( transformData_ ) + {} + + AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureGeometryTrianglesDataKHR ) ); + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT + { + transformData = transformData_; + return *this; + } + + + operator VkAccelerationStructureGeometryTrianglesDataKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + 
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {}; + + }; + static_assert( sizeof( AccelerationStructureGeometryTrianglesDataKHR ) == sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryTrianglesDataKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureGeometryAabbsDataKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureGeometryAabbsDataKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}) VULKAN_HPP_NOEXCEPT + : data( data_ ), stride( stride_ ) + {} + + AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureGeometryAabbsDataKHR ) ); + return *this; + } + + AccelerationStructureGeometryAabbsDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureGeometryAabbsDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + + operator VkAccelerationStructureGeometryAabbsDataKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + + }; + static_assert( sizeof( AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
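// A minimal usage sketch (illustrative; `aabbBufferAddress` is a hypothetical
// VULKAN_HPP_NAMESPACE::DeviceAddress obtained elsewhere): `data` points at packed
// AABB entries and `stride` is the byte distance between consecutive entries.
//
//   VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbsData;
//   aabbsData.setData( aabbBufferAddress )
//            .setStride( sizeof( VULKAN_HPP_NAMESPACE::AabbPositionsKHR ) );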
); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryAabbsDataKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureGeometryInstancesDataKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureGeometryInstancesDataKHR(VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}) VULKAN_HPP_NOEXCEPT + : arrayOfPointers( arrayOfPointers_ ), data( data_ ) + {} + + AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureGeometryInstancesDataKHR ) ); + return *this; + } + + AccelerationStructureGeometryInstancesDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT + { + arrayOfPointers = arrayOfPointers_; + return *this; + } + + AccelerationStructureGeometryInstancesDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + + operator VkAccelerationStructureGeometryInstancesDataKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + + }; + static_assert( sizeof( AccelerationStructureGeometryInstancesDataKHR ) == sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryInstancesDataKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + union AccelerationStructureGeometryDataKHR + { + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const& rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); + } + + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) + : triangles( triangles_ ) + {} + + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ ) + : aabbs( aabbs_ ) + {} + + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ ) + : instances( instances_ ) + {} + + AccelerationStructureGeometryDataKHR & setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT + { + triangles = triangles_; + return *this; + } + + AccelerationStructureGeometryDataKHR & setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT + { + aabbs = aabbs_; + return *this; + } + + AccelerationStructureGeometryDataKHR & setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT + { + instances = instances_; + return *this; + } + + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); + return *this; + } + + operator VkAccelerationStructureGeometryDataKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkAccelerationStructureGeometryDataKHR &() + { + return *reinterpret_cast(this); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances; +#else + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryInstancesDataKHR instances; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureGeometryKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureGeometryKHR(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT + : geometryType( geometryType_ ), geometry( geometry_ ), flags( flags_ ) + {} + + AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureGeometryKHR ) ); + return *this; + } + + AccelerationStructureGeometryKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + { + geometryType = geometryType_; + return *this; + } + + AccelerationStructureGeometryKHR & setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT + { + geometry = geometry_; + return *this; + } + + AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + + operator VkAccelerationStructureGeometryKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + + }; + static_assert( sizeof( AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
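// A minimal usage sketch (illustrative; `trianglesData` is a hypothetical
// AccelerationStructureGeometryTrianglesDataKHR filled in beforehand). Only the union
// member selected by `geometryType` is read, so the type and the payload must match:
//
//   VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR geom;
//   geom.setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles )
//       .setGeometry( trianglesData );   // implicitly wrapped into AccelerationStructureGeometryDataKHR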
); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + union DeviceOrHostAddressKHR + { + DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const& rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); + } + + DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) + : deviceAddress( deviceAddress_ ) + {} + + DeviceOrHostAddressKHR( void* hostAddress_ ) + : hostAddress( hostAddress_ ) + {} + + DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + DeviceOrHostAddressKHR & setHostAddress( void* hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } + + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); + return *this; + } + + operator VkDeviceOrHostAddressKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceOrHostAddressKHR &() + { + return *reinterpret_cast(this); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + void* hostAddress; +#else + VkDeviceAddress deviceAddress; + void* hostAddress; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureBuildGeometryInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureBuildGeometryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 update_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const * ppGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}) VULKAN_HPP_NOEXCEPT + : type( type_ ), flags( flags_ ), update( update_ ), srcAccelerationStructure( srcAccelerationStructure_ ), dstAccelerationStructure( dstAccelerationStructure_ ), geometryArrayOfPointers( geometryArrayOfPointers_ ), geometryCount( geometryCount_ ), ppGeometries( ppGeometries_ ), scratchData( scratchData_ ) + {} + + AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + 
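// Note: as with the other struct wrappers in this header, the copy assignment below is a
// raw memcpy of the wrapper; the static_asserts after the struct check that the wrapper
// keeps the size and standard layout of the underlying VkAccelerationStructureBuildGeometryInfoKHR.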
AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast<void*>( this ), &rhs, sizeof( AccelerationStructureBuildGeometryInfoKHR ) ); + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setUpdate( VULKAN_HPP_NAMESPACE::Bool32 update_ ) VULKAN_HPP_NOEXCEPT + { + update = update_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + srcAccelerationStructure = srcAccelerationStructure_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + dstAccelerationStructure = dstAccelerationStructure_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setGeometryArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ ) VULKAN_HPP_NOEXCEPT + { + geometryArrayOfPointers = geometryArrayOfPointers_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = geometryCount_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT + { + ppGeometries = ppGeometries_; + return *this; + } + + AccelerationStructureBuildGeometryInfoKHR & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT + { + scratchData = scratchData_; + return *this; + } + + + operator VkAccelerationStructureBuildGeometryInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( this ); + } + + operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR*>( this ); + } + + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 update = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const * ppGeometries = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; + + }; + static_assert( sizeof( AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), "struct and wrapper have different size!"
); + static_assert( std::is_standard_layout<AccelerationStructureBuildGeometryInfoKHR>::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType<StructureType, StructureType::eAccelerationStructureBuildGeometryInfoKHR> + { + using Type = AccelerationStructureBuildGeometryInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureBuildOffsetInfoKHR + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildOffsetInfoKHR(uint32_t primitiveCount_ = {}, uint32_t primitiveOffset_ = {}, uint32_t firstVertex_ = {}, uint32_t transformOffset_ = {}) VULKAN_HPP_NOEXCEPT + : primitiveCount( primitiveCount_ ), primitiveOffset( primitiveOffset_ ), firstVertex( firstVertex_ ), transformOffset( transformOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildOffsetInfoKHR( AccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildOffsetInfoKHR( VkAccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureBuildOffsetInfoKHR & operator=( VkAccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR const *>( &rhs ); + return *this; + } + + AccelerationStructureBuildOffsetInfoKHR & operator=( AccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast<void*>( this ), &rhs, sizeof( AccelerationStructureBuildOffsetInfoKHR ) ); + return *this; + } + + AccelerationStructureBuildOffsetInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT + { + primitiveCount = primitiveCount_; + return *this; + } + + AccelerationStructureBuildOffsetInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT + { + primitiveOffset = primitiveOffset_; + return *this; + } + + AccelerationStructureBuildOffsetInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + AccelerationStructureBuildOffsetInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT + { + transformOffset = transformOffset_; + return *this; + } + + + operator VkAccelerationStructureBuildOffsetInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkAccelerationStructureBuildOffsetInfoKHR*>( this ); + } + + operator VkAccelerationStructureBuildOffsetInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkAccelerationStructureBuildOffsetInfoKHR*>( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureBuildOffsetInfoKHR const& ) const = default; +#else + bool operator==( AccelerationStructureBuildOffsetInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( primitiveCount == rhs.primitiveCount ) + && ( primitiveOffset == rhs.primitiveOffset ) + && ( firstVertex == rhs.firstVertex ) + && ( transformOffset == rhs.transformOffset ); + } + + bool operator!=( AccelerationStructureBuildOffsetInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } #endif - DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + + + public: + uint32_t primitiveCount = {}; + uint32_t primitiveOffset = {}; + uint32_t firstVertex = {}; + uint32_t transformOffset = {}; + + }; + static_assert( sizeof( AccelerationStructureBuildOffsetInfoKHR ) == sizeof( VkAccelerationStructureBuildOffsetInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<AccelerationStructureBuildOffsetInfoKHR>::value, "struct wrapper is not a standard layout!"
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureCreateGeometryTypeInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateGeometryTypeInfoKHR(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, uint32_t maxPrimitiveCount_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, uint32_t maxVertexCount_ = {}, VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ = {}) VULKAN_HPP_NOEXCEPT + : geometryType( geometryType_ ), maxPrimitiveCount( maxPrimitiveCount_ ), indexType( indexType_ ), maxVertexCount( maxVertexCount_ ), vertexFormat( vertexFormat_ ), allowsTransforms( allowsTransforms_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateGeometryTypeInfoKHR( AccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateGeometryTypeInfoKHR( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - m_debugReportCallbackEXT = VK_NULL_HANDLE; + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureCreateGeometryTypeInfoKHR & operator=( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateGeometryTypeInfoKHR & operator=( AccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT; - } - - bool operator!=(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT; - } - - bool operator<(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT == VK_NULL_HANDLE; - } - - private: - VkDebugReportCallbackEXT m_debugReportCallbackEXT; - }; - static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = DebugReportCallbackEXT; - }; - - class DebugUtilsMessengerEXT - { - public: - using CType = VkDebugUtilsMessengerEXT; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugUtilsMessengerEXT; - - public: - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT - : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT - : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) VULKAN_HPP_NOEXCEPT - { - m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) ); return *this; } + + AccelerationStructureCreateGeometryTypeInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + { + geometryType = geometryType_; + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setMaxPrimitiveCount( uint32_t maxPrimitiveCount_ ) VULKAN_HPP_NOEXCEPT + { + maxPrimitiveCount = maxPrimitiveCount_; + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setMaxVertexCount( uint32_t maxVertexCount_ ) VULKAN_HPP_NOEXCEPT + { + maxVertexCount = maxVertexCount_; + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + AccelerationStructureCreateGeometryTypeInfoKHR & setAllowsTransforms( VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ ) VULKAN_HPP_NOEXCEPT + { + allowsTransforms = allowsTransforms_; + return *this; + } + + + operator VkAccelerationStructureCreateGeometryTypeInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateGeometryTypeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureCreateGeometryTypeInfoKHR const& ) const = default; +#else + bool operator==( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( geometryType == rhs.geometryType ) + && ( maxPrimitiveCount == rhs.maxPrimitiveCount ) + && ( indexType == rhs.indexType ) + && ( maxVertexCount == rhs.maxVertexCount ) + && ( vertexFormat == rhs.vertexFormat ) + && ( allowsTransforms == rhs.allowsTransforms ); + } + + bool operator!=( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } #endif - DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_debugUtilsMessengerEXT = VK_NULL_HANDLE; - return *this; - } - bool operator==( DebugUtilsMessengerEXT 
const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT; - } - bool operator!=(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT; - } + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + uint32_t maxPrimitiveCount = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + uint32_t maxVertexCount = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms = {}; - bool operator<(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT == VK_NULL_HANDLE; - } - - private: - VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT; }; - static_assert( sizeof( DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" ); + static_assert( sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) == sizeof( VkAccelerationStructureCreateGeometryTypeInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); template <> - struct cpp_type + struct CppType { - using type = DebugUtilsMessengerEXT; + using Type = AccelerationStructureCreateGeometryTypeInfoKHR; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - class DisplayKHR +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureCreateInfoKHR { - public: - using CType = VkDisplayKHR; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayKHR; - - public: - VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT - : m_displayKHR(VK_NULL_HANDLE) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR(VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, uint32_t maxGeometryCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}) VULKAN_HPP_NOEXCEPT + : compactedSize( compactedSize_ ), type( type_ ), flags( flags_ ), maxGeometryCount( maxGeometryCount_ ), pGeometryInfos( pGeometryInfos_ ), deviceAddress( deviceAddress_ ) {} - VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_displayKHR(VK_NULL_HANDLE) - {} + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT - : m_displayKHR( displayKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - m_displayKHR = displayKHR; + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometryInfos_, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) + : compactedSize( compactedSize_ ), type( type_ ), flags( flags_ ), maxGeometryCount( static_cast( geometryInfos_.size() ) ), pGeometryInfos( geometryInfos_.data() ), deviceAddress( deviceAddress_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + + AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureCreateInfoKHR ) ); + return *this; + } + + AccelerationStructureCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureCreateInfoKHR & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT + { + compactedSize = compactedSize_; + return *this; + } + + AccelerationStructureCreateInfoKHR & 
setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + AccelerationStructureCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AccelerationStructureCreateInfoKHR & setMaxGeometryCount( uint32_t maxGeometryCount_ ) VULKAN_HPP_NOEXCEPT + { + maxGeometryCount = maxGeometryCount_; + return *this; + } + + AccelerationStructureCreateInfoKHR & setPGeometryInfos( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos_ ) VULKAN_HPP_NOEXCEPT + { + pGeometryInfos = pGeometryInfos_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + AccelerationStructureCreateInfoKHR & setGeometryInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometryInfos_ ) VULKAN_HPP_NOEXCEPT + { + maxGeometryCount = static_cast( geometryInfos_.size() ); + pGeometryInfos = geometryInfos_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + + operator VkAccelerationStructureCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureCreateInfoKHR const& ) const = default; +#else + bool operator==( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( compactedSize == rhs.compactedSize ) + && ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( maxGeometryCount == rhs.maxGeometryCount ) + && ( pGeometryInfos == rhs.pGeometryInfos ) + && ( deviceAddress == rhs.deviceAddress ); + } + + bool operator!=( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } #endif - DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_displayKHR = VK_NULL_HANDLE; - return *this; - } - bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR == rhs.m_displayKHR; - } - bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR != rhs.m_displayKHR; - } + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + uint32_t maxGeometryCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; - bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR < rhs.m_displayKHR; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return 
m_displayKHR == VK_NULL_HANDLE; - } - - private: - VkDisplayKHR m_displayKHR; }; - static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" ); + static_assert( sizeof( AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> - struct cpp_type + struct CppType { - using type = DisplayKHR; - }; - - class SwapchainKHR - { - public: - using CType = VkSwapchainKHR; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSwapchainKHR; - - public: - VULKAN_HPP_CONSTEXPR SwapchainKHR() VULKAN_HPP_NOEXCEPT - : m_swapchainKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_swapchainKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT - : m_swapchainKHR( swapchainKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) VULKAN_HPP_NOEXCEPT - { - m_swapchainKHR = swapchainKHR; - return *this; - } -#endif - - SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_swapchainKHR = VK_NULL_HANDLE; - return *this; - } - - bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR == rhs.m_swapchainKHR; - } - - bool operator!=(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR != rhs.m_swapchainKHR; - } - - bool operator<(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR < rhs.m_swapchainKHR; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR == VK_NULL_HANDLE; - } - - private: - VkSwapchainKHR m_swapchainKHR; - }; - static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = SwapchainKHR; - }; - - class Semaphore - { - public: - using CType = VkSemaphore; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSemaphore; - - public: - VULKAN_HPP_CONSTEXPR Semaphore() VULKAN_HPP_NOEXCEPT - : m_semaphore(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_semaphore(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT - : m_semaphore( semaphore ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Semaphore & operator=(VkSemaphore semaphore) VULKAN_HPP_NOEXCEPT - { - m_semaphore = semaphore; - return *this; - } -#endif - - Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_semaphore = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_semaphore == rhs.m_semaphore; - } - - bool operator!=(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_semaphore != rhs.m_semaphore; - } - - bool operator<(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_semaphore < rhs.m_semaphore; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT - { - return m_semaphore; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_semaphore != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_semaphore == VK_NULL_HANDLE; - } - - private: - VkSemaphore m_semaphore; - }; - static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" ); - - template <> - struct cpp_type - { - using type = Semaphore; - }; - - class Fence - { - public: - using CType = VkFence; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFence; - - public: - VULKAN_HPP_CONSTEXPR Fence() VULKAN_HPP_NOEXCEPT - : m_fence(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_fence(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT - : m_fence( fence ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Fence & operator=(VkFence fence) VULKAN_HPP_NOEXCEPT - { - m_fence = fence; - return *this; - } -#endif - - Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_fence = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_fence == rhs.m_fence; - } - - bool operator!=(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_fence != rhs.m_fence; - } - - bool operator<(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_fence < rhs.m_fence; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT - { - return m_fence; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_fence != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_fence == VK_NULL_HANDLE; - } - - private: - VkFence m_fence; - }; - static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = Fence; - }; - - class PerformanceConfigurationINTEL - { - public: - using CType = VkPerformanceConfigurationINTEL; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePerformanceConfigurationINTEL; - - public: - VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT - : m_performanceConfigurationINTEL(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_performanceConfigurationINTEL(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT - : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT - { - m_performanceConfigurationINTEL = performanceConfigurationINTEL; - return *this; - } -#endif - - PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_performanceConfigurationINTEL = VK_NULL_HANDLE; - return *this; - } - - bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL; - } - - bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL; - } - - bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL == VK_NULL_HANDLE; - } - - private: - VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL; - }; - static_assert( sizeof( PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = PerformanceConfigurationINTEL; - }; - - class QueryPool - { - public: - using CType = VkQueryPool; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueryPool; - - public: - VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT - : m_queryPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_queryPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT - : m_queryPool( queryPool ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT - { - m_queryPool = queryPool; - return *this; - } -#endif - - QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_queryPool = VK_NULL_HANDLE; - return *this; - } - - bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queryPool == rhs.m_queryPool; - } - - bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queryPool != rhs.m_queryPool; - } - - bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queryPool < rhs.m_queryPool; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT - { - return m_queryPool; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_queryPool != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_queryPool == VK_NULL_HANDLE; - } - - private: - VkQueryPool m_queryPool; - }; - static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" ); - - template <> - struct cpp_type - { - using type = QueryPool; + using Type = AccelerationStructureCreateInfoKHR; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ class Buffer { public: using CType = VkBuffer; - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; public: VULKAN_HPP_CONSTEXPR Buffer() VULKAN_HPP_NOEXCEPT @@ -14037,6 +15462,9 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Buffer const& ) const = default; +#else bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_buffer == rhs.m_buffer; @@ -14051,6 +15479,7 @@ namespace VULKAN_HPP_NAMESPACE { return m_buffer < rhs.m_buffer; } +#endif VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT { @@ -14070,5405 +15499,61 @@ namespace VULKAN_HPP_NAMESPACE private: VkBuffer m_buffer; }; - static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" ); + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" ); template <> - struct cpp_type + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type { - using type = Buffer; + using type = VULKAN_HPP_NAMESPACE::Buffer; }; - class PipelineLayout - { - public: - using CType = VkPipelineLayout; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineLayout; - - public: - VULKAN_HPP_CONSTEXPR PipelineLayout() VULKAN_HPP_NOEXCEPT - : m_pipelineLayout(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_pipelineLayout(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT - : m_pipelineLayout( pipelineLayout ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PipelineLayout & operator=(VkPipelineLayout pipelineLayout) VULKAN_HPP_NOEXCEPT - { - m_pipelineLayout = pipelineLayout; - return *this; - } -#endif - - PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_pipelineLayout = VK_NULL_HANDLE; - return *this; - } - - bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout == rhs.m_pipelineLayout; - } - - bool operator!=(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout != rhs.m_pipelineLayout; - } - - bool operator<(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout < rhs.m_pipelineLayout; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout == VK_NULL_HANDLE; - } - - private: - VkPipelineLayout m_pipelineLayout; - }; - static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct CppType { - using type = PipelineLayout; + using Type = VULKAN_HPP_NAMESPACE::Buffer; }; - class DescriptorSet - { - public: - using CType = VkDescriptorSet; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSet; - - public: - VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT - : m_descriptorSet(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_descriptorSet(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT - : m_descriptorSet( descriptorSet ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT - { - m_descriptorSet = descriptorSet; - return *this; - } -#endif - - DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_descriptorSet = VK_NULL_HANDLE; - return *this; - } - - bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet == rhs.m_descriptorSet; - } - - bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet != rhs.m_descriptorSet; - } - - bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet < rhs.m_descriptorSet; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet == VK_NULL_HANDLE; - } - - private: - VkDescriptorSet m_descriptorSet; - }; - static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" 
); template <> - struct cpp_type + struct CppType { - using type = DescriptorSet; + using Type = VULKAN_HPP_NAMESPACE::Buffer; }; - class Pipeline - { - public: - using CType = VkPipeline; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipeline; - - public: - VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT - : m_pipeline(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_pipeline(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT - : m_pipeline( pipeline ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT - { - m_pipeline = pipeline; - return *this; - } -#endif - - Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_pipeline = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipeline == rhs.m_pipeline; - } - - bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipeline != rhs.m_pipeline; - } - - bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipeline < rhs.m_pipeline; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT - { - return m_pipeline; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_pipeline != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_pipeline == VK_NULL_HANDLE; - } - - private: - VkPipeline m_pipeline; - }; - static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" ); template <> - struct cpp_type + struct isVulkanHandleType { - using type = Pipeline; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class ImageView - { - public: - using CType = VkImageView; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImageView; - - public: - VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT - : m_imageView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_imageView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT - : m_imageView( imageView ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT - { - m_imageView = imageView; - return *this; - } -#endif - - ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_imageView = VK_NULL_HANDLE; - return *this; - } - - bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_imageView == rhs.m_imageView; - } - - bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_imageView != rhs.m_imageView; - } - - bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_imageView < rhs.m_imageView; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT - { - return m_imageView; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_imageView != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_imageView == VK_NULL_HANDLE; - } - - private: - VkImageView m_imageView; - }; - static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = ImageView; - }; - - class Image - { - public: - using CType = VkImage; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImage; - - public: - VULKAN_HPP_CONSTEXPR Image() VULKAN_HPP_NOEXCEPT - : m_image(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_image(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT - : m_image( image ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Image & operator=(VkImage image) VULKAN_HPP_NOEXCEPT - { - m_image = image; - return *this; - } -#endif - - Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_image = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_image == rhs.m_image; - } - - bool operator!=(Image const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_image != rhs.m_image; - } - - bool operator<(Image const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_image < rhs.m_image; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT - { - return m_image; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_image != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_image == VK_NULL_HANDLE; - } - - private: - VkImage m_image; - }; - static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" ); - - template <> - struct cpp_type - { - using type = Image; - }; - - class AccelerationStructureNV - { - public: - using CType = VkAccelerationStructureNV; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eAccelerationStructureNV; - - public: - VULKAN_HPP_CONSTEXPR AccelerationStructureNV() VULKAN_HPP_NOEXCEPT - : m_accelerationStructureNV(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_accelerationStructureNV(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT - : m_accelerationStructureNV( accelerationStructureNV ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - AccelerationStructureNV & operator=(VkAccelerationStructureNV accelerationStructureNV) VULKAN_HPP_NOEXCEPT - { - m_accelerationStructureNV = accelerationStructureNV; - return *this; - } -#endif - - AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_accelerationStructureNV = VK_NULL_HANDLE; - return *this; - } - - bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureNV == rhs.m_accelerationStructureNV; - } - - bool operator!=(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureNV != rhs.m_accelerationStructureNV; - } - - bool operator<(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureNV < rhs.m_accelerationStructureNV; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureNV; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureNV != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureNV == VK_NULL_HANDLE; - } - - private: - VkAccelerationStructureNV 
m_accelerationStructureNV; - }; - static_assert( sizeof( AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), "handle and wrapper have different size!" ); - - template <> - struct cpp_type - { - using type = AccelerationStructureNV; - }; - - class DescriptorUpdateTemplate - { - public: - using CType = VkDescriptorUpdateTemplate; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorUpdateTemplate; - - public: - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT - : m_descriptorUpdateTemplate(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_descriptorUpdateTemplate(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT - : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT - { - m_descriptorUpdateTemplate = descriptorUpdateTemplate; - return *this; - } -#endif - - DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_descriptorUpdateTemplate = VK_NULL_HANDLE; - return *this; - } - - bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate; - } - - bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate; - } - - bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate == VK_NULL_HANDLE; - } - - private: - VkDescriptorUpdateTemplate m_descriptorUpdateTemplate; - }; - static_assert( sizeof( DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = DescriptorUpdateTemplate; - }; - using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; - - class Event - { - public: - using CType = VkEvent; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eEvent; - - public: - VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT - : m_event(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_event(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT - : m_event( event ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT - { - m_event = event; - return *this; - } -#endif - - Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_event = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_event == rhs.m_event; - } - - bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_event != rhs.m_event; - } - - bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_event < rhs.m_event; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT - { - return m_event; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_event != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_event == VK_NULL_HANDLE; - } - - private: - VkEvent m_event; - }; - static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" ); - - template <> - struct cpp_type - { - using type = Event; - }; - - class CommandBuffer - { - public: - using CType = VkCommandBuffer; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandBuffer; - - public: - VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT - : m_commandBuffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_commandBuffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT - : m_commandBuffer( commandBuffer ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT - { - m_commandBuffer = commandBuffer; - return *this; - } -#endif - - CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_commandBuffer = VK_NULL_HANDLE; - return *this; - } - - bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandBuffer == rhs.m_commandBuffer; - } - - bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandBuffer != rhs.m_commandBuffer; - } - - bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandBuffer < rhs.m_commandBuffer; - } - - template - Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, 
const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, ArrayProxy sizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer 
scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image 
dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void debugMarkerEndEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void 
dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-
template - void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void endConditionalRenderingEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void endRenderPass(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void executeCommands( ArrayProxy commandBuffers, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void processCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void pushConstants( 
VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy values, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy descriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void reserveSpaceForCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setBlendConstants( const float blendConstants[4], Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV 
sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy customSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void setDeviceMask( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy discardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy exclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void setLineWidth( float lineWidth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setScissor( uint32_t firstScissor, ArrayProxy scissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewport( uint32_t firstViewport, ArrayProxy viewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy shadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy viewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer 
raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void waitEvents( ArrayProxy events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void writeAccelerationStructuresPropertiesNV( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const 
VULKAN_HPP_NOEXCEPT; - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT - { - return m_commandBuffer; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_commandBuffer != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_commandBuffer == VK_NULL_HANDLE; - } - - private: - VkCommandBuffer m_commandBuffer; - }; - static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" ); - - template <> - struct cpp_type - { - using type = CommandBuffer; - }; - - class DeviceMemory - { - public: - using CType = VkDeviceMemory; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDeviceMemory; - - public: - VULKAN_HPP_CONSTEXPR DeviceMemory() VULKAN_HPP_NOEXCEPT - : m_deviceMemory(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_deviceMemory(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT - : m_deviceMemory( deviceMemory ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DeviceMemory & operator=(VkDeviceMemory deviceMemory) VULKAN_HPP_NOEXCEPT - { - m_deviceMemory = deviceMemory; - return *this; - } -#endif - - DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_deviceMemory = VK_NULL_HANDLE; - return *this; - } - - bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory == rhs.m_deviceMemory; - } - - bool operator!=(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory != rhs.m_deviceMemory; - } - - bool operator<(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory < rhs.m_deviceMemory; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory == VK_NULL_HANDLE; - } - - private: - VkDeviceMemory m_deviceMemory; - }; - static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = DeviceMemory; - }; - - class BufferView - { - public: - using CType = VkBufferView; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBufferView; - - public: - VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT - : m_bufferView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_bufferView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT - : m_bufferView( bufferView ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT - { - m_bufferView = bufferView; - return *this; - } -#endif - - BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_bufferView = VK_NULL_HANDLE; - return *this; - } - - bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferView == rhs.m_bufferView; - } - - bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferView != rhs.m_bufferView; - } - - bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferView < rhs.m_bufferView; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT - { - return m_bufferView; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_bufferView != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_bufferView == VK_NULL_HANDLE; - } - - private: - VkBufferView m_bufferView; - }; - static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" ); - - template <> - struct cpp_type - { - using type = BufferView; - }; - - class CommandPool - { - public: - using CType = VkCommandPool; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandPool; - - public: - VULKAN_HPP_CONSTEXPR CommandPool() VULKAN_HPP_NOEXCEPT - : m_commandPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_commandPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT - : m_commandPool( commandPool ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - CommandPool & operator=(VkCommandPool commandPool) VULKAN_HPP_NOEXCEPT - { - m_commandPool = commandPool; - return *this; - } -#endif - - CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_commandPool = VK_NULL_HANDLE; - return *this; - } - - bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandPool == rhs.m_commandPool; - } - - bool operator!=(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandPool != rhs.m_commandPool; - } - - bool operator<(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandPool < rhs.m_commandPool; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT - { - return m_commandPool; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_commandPool != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_commandPool == VK_NULL_HANDLE; - } - - private: - VkCommandPool m_commandPool; - }; - static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" 
);
-
-  template <>
-  struct cpp_type<ObjectType::eCommandPool>
-  {
-    using type = CommandPool;
-  };
-
-  class PipelineCache
-  {
-  public:
-    using CType = VkPipelineCache;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineCache;
-
-  public:
-    VULKAN_HPP_CONSTEXPR PipelineCache() VULKAN_HPP_NOEXCEPT
-      : m_pipelineCache(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_pipelineCache(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT
-      : m_pipelineCache( pipelineCache )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    PipelineCache & operator=(VkPipelineCache pipelineCache) VULKAN_HPP_NOEXCEPT
-    {
-      m_pipelineCache = pipelineCache;
-      return *this;
-    }
-#endif
-
-    PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-    {
-      m_pipelineCache = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_pipelineCache == rhs.m_pipelineCache;
-    }
-
-    bool operator!=(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_pipelineCache != rhs.m_pipelineCache;
-    }
-
-    bool operator<(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_pipelineCache < rhs.m_pipelineCache;
-    }
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_pipelineCache;
-    }
-
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_pipelineCache != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_pipelineCache == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkPipelineCache m_pipelineCache;
-  };
-  static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!"
); - - template <> - struct cpp_type - { - using type = PipelineCache; - }; - - class DescriptorPool - { - public: - using CType = VkDescriptorPool; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorPool; - - public: - VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT - : m_descriptorPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_descriptorPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT - : m_descriptorPool( descriptorPool ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT - { - m_descriptorPool = descriptorPool; - return *this; - } -#endif - - DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_descriptorPool = VK_NULL_HANDLE; - return *this; - } - - bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool == rhs.m_descriptorPool; - } - - bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool != rhs.m_descriptorPool; - } - - bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool < rhs.m_descriptorPool; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool == VK_NULL_HANDLE; - } - - private: - VkDescriptorPool m_descriptorPool; - }; - static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = DescriptorPool; - }; - - class DescriptorSetLayout - { - public: - using CType = VkDescriptorSetLayout; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSetLayout; - - public: - VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT - : m_descriptorSetLayout(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_descriptorSetLayout(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT - : m_descriptorSetLayout( descriptorSetLayout ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT - { - m_descriptorSetLayout = descriptorSetLayout; - return *this; - } -#endif - - DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_descriptorSetLayout = VK_NULL_HANDLE; - return *this; - } - - bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout == rhs.m_descriptorSetLayout; - } - - bool operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout != rhs.m_descriptorSetLayout; - } - - bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout < rhs.m_descriptorSetLayout; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout == VK_NULL_HANDLE; - } - - private: - VkDescriptorSetLayout m_descriptorSetLayout; - }; - static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = DescriptorSetLayout; - }; - - class Framebuffer - { - public: - using CType = VkFramebuffer; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFramebuffer; - - public: - VULKAN_HPP_CONSTEXPR Framebuffer() VULKAN_HPP_NOEXCEPT - : m_framebuffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_framebuffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT - : m_framebuffer( framebuffer ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Framebuffer & operator=(VkFramebuffer framebuffer) VULKAN_HPP_NOEXCEPT - { - m_framebuffer = framebuffer; - return *this; - } -#endif - - Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_framebuffer = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer == rhs.m_framebuffer; - } - - bool operator!=(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer != rhs.m_framebuffer; - } - - bool operator<(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer < rhs.m_framebuffer; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer == VK_NULL_HANDLE; - } - - private: - VkFramebuffer m_framebuffer; - }; - static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" ); - - template <> - struct cpp_type - { - using type = Framebuffer; - }; - - class IndirectCommandsLayoutNVX - { - public: - using CType = VkIndirectCommandsLayoutNVX; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eIndirectCommandsLayoutNVX; - - public: - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX() VULKAN_HPP_NOEXCEPT - : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNVX( VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX ) VULKAN_HPP_NOEXCEPT - : m_indirectCommandsLayoutNVX( indirectCommandsLayoutNVX ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - IndirectCommandsLayoutNVX & operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX) VULKAN_HPP_NOEXCEPT - { - m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX; - return *this; - } -#endif - - IndirectCommandsLayoutNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_indirectCommandsLayoutNVX = VK_NULL_HANDLE; - return *this; - } - - bool operator==( IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX; - } - - bool operator!=(IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX; - } - - bool operator<(IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNVX() const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNVX; - } 
- - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE; - } - - private: - VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX; - }; - static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" ); - - template <> - struct cpp_type - { - using type = IndirectCommandsLayoutNVX; - }; - - class ObjectTableNVX - { - public: - using CType = VkObjectTableNVX; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eObjectTableNVX; - - public: - VULKAN_HPP_CONSTEXPR ObjectTableNVX() VULKAN_HPP_NOEXCEPT - : m_objectTableNVX(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR ObjectTableNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_objectTableNVX(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT ObjectTableNVX( VkObjectTableNVX objectTableNVX ) VULKAN_HPP_NOEXCEPT - : m_objectTableNVX( objectTableNVX ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ObjectTableNVX & operator=(VkObjectTableNVX objectTableNVX) VULKAN_HPP_NOEXCEPT - { - m_objectTableNVX = objectTableNVX; - return *this; - } -#endif - - ObjectTableNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_objectTableNVX = VK_NULL_HANDLE; - return *this; - } - - bool operator==( ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_objectTableNVX == rhs.m_objectTableNVX; - } - - bool operator!=(ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_objectTableNVX != rhs.m_objectTableNVX; - } - - bool operator<(ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_objectTableNVX < rhs.m_objectTableNVX; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkObjectTableNVX() const VULKAN_HPP_NOEXCEPT - { - return m_objectTableNVX; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_objectTableNVX != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_objectTableNVX == VK_NULL_HANDLE; - } - - private: - VkObjectTableNVX m_objectTableNVX; - }; - static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = ObjectTableNVX; - }; - - class RenderPass - { - public: - using CType = VkRenderPass; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eRenderPass; - - public: - VULKAN_HPP_CONSTEXPR RenderPass() VULKAN_HPP_NOEXCEPT - : m_renderPass(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_renderPass(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT - : m_renderPass( renderPass ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - RenderPass & operator=(VkRenderPass renderPass) VULKAN_HPP_NOEXCEPT - { - m_renderPass = renderPass; - return *this; - } -#endif - - RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_renderPass = VK_NULL_HANDLE; - return *this; - } - - bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_renderPass == rhs.m_renderPass; - } - - bool operator!=(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_renderPass != rhs.m_renderPass; - } - - bool operator<(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_renderPass < rhs.m_renderPass; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT - { - return m_renderPass; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_renderPass != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_renderPass == VK_NULL_HANDLE; - } - - private: - VkRenderPass m_renderPass; - }; - static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" ); - - template <> - struct cpp_type - { - using type = RenderPass; - }; - - class Sampler - { - public: - using CType = VkSampler; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSampler; - - public: - VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT - : m_sampler(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_sampler(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT - : m_sampler( sampler ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT - { - m_sampler = sampler; - return *this; - } -#endif - - Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_sampler = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_sampler == rhs.m_sampler; - } - - bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_sampler != rhs.m_sampler; - } - - bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_sampler < rhs.m_sampler; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT - { - return m_sampler; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_sampler != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_sampler == VK_NULL_HANDLE; - } - - private: - VkSampler m_sampler; - }; - static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" 
);
-
-  template <>
-  struct cpp_type<ObjectType::eSampler>
-  {
-    using type = Sampler;
-  };
-
-  class SamplerYcbcrConversion
-  {
-  public:
-    using CType = VkSamplerYcbcrConversion;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSamplerYcbcrConversion;
-
-  public:
-    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT
-      : m_samplerYcbcrConversion(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_samplerYcbcrConversion(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
-      : m_samplerYcbcrConversion( samplerYcbcrConversion )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) VULKAN_HPP_NOEXCEPT
-    {
-      m_samplerYcbcrConversion = samplerYcbcrConversion;
-      return *this;
-    }
-#endif
-
-    SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-    {
-      m_samplerYcbcrConversion = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion;
-    }
-
-    bool operator!=(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion;
-    }
-
-    bool operator<(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion;
-    }
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_samplerYcbcrConversion;
-    }
-
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_samplerYcbcrConversion != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_samplerYcbcrConversion == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkSamplerYcbcrConversion m_samplerYcbcrConversion;
-  };
-  static_assert( sizeof( SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" );
-
-  template <>
-  struct cpp_type<ObjectType::eSamplerYcbcrConversion>
-  {
-    using type = SamplerYcbcrConversion;
-  };
-  using SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
-
-  class ShaderModule
-  {
-  public:
-    using CType = VkShaderModule;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eShaderModule;
-
-  public:
-    VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT
-      : m_shaderModule(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_shaderModule(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
-      : m_shaderModule( shaderModule )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT
-    {
-      m_shaderModule = shaderModule;
-      return *this;
-    }
-#endif
-
-    ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-    {
-      m_shaderModule = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_shaderModule == rhs.m_shaderModule;
-    }
-
-    bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_shaderModule != rhs.m_shaderModule;
-    }
-
-    bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_shaderModule < rhs.m_shaderModule;
-    }
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_shaderModule;
-    }
-
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_shaderModule != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_shaderModule == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkShaderModule m_shaderModule;
-  };
-  static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
-
-  template <>
-  struct cpp_type<ObjectType::eShaderModule>
-  {
-    using type = ShaderModule;
-  };
-
-  class ValidationCacheEXT
-  {
-  public:
-    using CType = VkValidationCacheEXT;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eValidationCacheEXT;
-
-  public:
-    VULKAN_HPP_CONSTEXPR ValidationCacheEXT() VULKAN_HPP_NOEXCEPT
-      : m_validationCacheEXT(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_validationCacheEXT(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
-      : m_validationCacheEXT( validationCacheEXT )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) VULKAN_HPP_NOEXCEPT
-    {
-      m_validationCacheEXT = validationCacheEXT;
-      return *this;
-    }
-#endif
-
-    ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-    {
-      m_validationCacheEXT = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_validationCacheEXT == rhs.m_validationCacheEXT;
-    }
-
-    bool operator!=(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_validationCacheEXT != rhs.m_validationCacheEXT;
-    }
-
-    bool operator<(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_validationCacheEXT < rhs.m_validationCacheEXT;
-    }
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_validationCacheEXT;
-    }
-
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_validationCacheEXT != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_validationCacheEXT == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkValidationCacheEXT m_validationCacheEXT;
-  };
-  static_assert( sizeof( ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!"
); - - template <> - struct cpp_type - { - using type = ValidationCacheEXT; - }; - - class Queue - { - public: - using CType = VkQueue; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueue; - - public: - VULKAN_HPP_CONSTEXPR Queue() VULKAN_HPP_NOEXCEPT - : m_queue(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_queue(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT - : m_queue( queue ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Queue & operator=(VkQueue queue) VULKAN_HPP_NOEXCEPT - { - m_queue = queue; - return *this; - } -#endif - - Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_queue = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queue == rhs.m_queue; - } - - bool operator!=(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queue != rhs.m_queue; - } - - bool operator<(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queue < rhs.m_queue; - } - - template - void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getCheckpointDataNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type bindSparse( ArrayProxy bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type submit( ArrayProxy submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT - { - return m_queue; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_queue != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_queue == VK_NULL_HANDLE; - } - - private: - VkQueue m_queue; - }; - static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" ); - - template <> - struct cpp_type - { - using type = Queue; - }; - -#ifndef VULKAN_HPP_NO_SMART_HANDLE - class Device; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueAccelerationStructureNV = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueBuffer = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueBufferView = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = PoolFree; }; - using UniqueCommandBuffer = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueCommandPool = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDescriptorPool = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = PoolFree; }; - using UniqueDescriptorSet = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDescriptorSetLayout = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDescriptorUpdateTemplate = UniqueHandle; - using UniqueDescriptorUpdateTemplateKHR = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectFree; }; - using UniqueDeviceMemory = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueEvent = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueFence = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueFramebuffer = UniqueHandle; - template class 
UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueImage = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueImageView = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueIndirectCommandsLayoutNVX = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueObjectTableNVX = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniquePipeline = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniquePipelineCache = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniquePipelineLayout = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueQueryPool = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueRenderPass = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSampler = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSamplerYcbcrConversion = UniqueHandle; - using UniqueSamplerYcbcrConversionKHR = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSemaphore = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueShaderModule = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSwapchainKHR = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueValidationCacheEXT = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ - - class Device - { - public: - using CType = VkDevice; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDevice; - - public: - VULKAN_HPP_CONSTEXPR Device() VULKAN_HPP_NOEXCEPT - : m_device(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_device(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT - : m_device( device ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Device & operator=(VkDevice device) VULKAN_HPP_NOEXCEPT - { - m_device = device; - return *this; - } -#endif - - Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_device = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_device == rhs.m_device; - } - - bool operator!=(Device const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_device != rhs.m_device; - } - - bool operator<(Device const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_device < rhs.m_device; - } - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - Result 
acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValue acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValue acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result allocateDescriptorSets( const 
VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type bindAccelerationStructureMemoryNV( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type bindBufferMemory2( 
ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type bindBufferMemory2KHR( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type bindImageMemory2( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type bindImageMemory2KHR( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createBuffer( const BufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createBufferView( const BufferViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType::type createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, 
Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType>::type createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type 
createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createEvent( const EventCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createEventUnique( const EventCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createFence( const FenceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch 
const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType::type createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType>::type createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createImage( const ImageCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createImageUnique( const ImageCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type 
createImageView( const ImageViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createIndirectCommandsLayoutNVX( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createObjectTableNVX( const VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ObjectTableNVX* pObjectTable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type 
createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType::type createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType>::type createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createSampler( const SamplerCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSharedSwapchainsKHR( uint32_t swapchainCount, const 
VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( 
VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - 
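For readers skimming the removed declarations above: each destroyable handle gets both an explicitly named member (destroyBuffer, destroyCommandPool, destroyDescriptorPool, ...) and a destroy overload resolved on the handle type, each in a raw pointer form and, unless VULKAN_HPP_DISABLE_ENHANCED_MODE is defined, an enhanced form whose allocation callbacks and dispatcher default. A minimal usage sketch follows; it is illustrative only and not part of the diff, and it assumes valid vk::Device and vk::Buffer handles and an initialised default dispatcher.

    // Hypothetical example, not part of vulkan.hpp: the two equivalent ways
    // the declarations above let an application free a vk::Buffer.
    #include <vulkan/vulkan.hpp>

    void releaseBuffer( vk::Device device, vk::Buffer buffer )
    {
      // Named member, enhanced mode: allocation callbacks and dispatcher
      // fall back to their defaults (nullptr / VULKAN_HPP_DEFAULT_DISPATCHER).
      device.destroyBuffer( buffer );

      // Equivalent overload resolved on the handle type; the underlying
      // C entry point (vkDestroyBuffer here) is picked for you.
      // device.destroy( buffer );   // would be a double free if run after the call above
    }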
template - void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Event event, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( 
VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Image image, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, 
Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; 
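The #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE guards that bracket every second overload above are a build-time switch: defining the macro before including the header strips the enhanced wrappers and leaves only the C-like members that take raw pointers. A hedged configuration sketch, purely to explain the guards (whether a project wants the macro is its own decision):

    // Hypothetical build configuration: disable the enhanced wrappers so only
    // the pointer-style members outside the #ifndef guards remain declared.
    #define VULKAN_HPP_DISABLE_ENHANCED_MODE
    #include <vulkan/vulkan.hpp>

    void destroySamplerRaw( vk::Device device, vk::Sampler sampler )
    {
      // Only the overload mirroring vkDestroySampler is available now:
      // explicit VkAllocationCallbacks pointer, no Optional<> convenience.
      device.destroySampler( sampler, nullptr );
    }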
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type displayPowerControlEXT( 
VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type flushMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void free( 
VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template - Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template - typename ResultValueType>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - template - DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - DeviceAddress getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - DeviceAddress getBufferAddressKHR( const BufferDeviceAddressInfo 
& info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getCalibratedTimestampsEXT( ArrayProxy timestampInfos, ArrayProxy timestamps, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getGroupPresentCapabilitiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - 
template - typename ResultValueType::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageMemoryRequirements2KHR( const 
VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - 
VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template - Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - template - Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result 
getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* 
pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - 
template - typename ResultValueType::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type invalidateMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result mergePipelineCaches( 
VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy pObjectTableEntries, ArrayProxy objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#ifdef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void releaseProfilingLockKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type resetFences( ArrayProxy fences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - Result setDebugUtilsObjectNameEXT( const 
VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setHdrMetadataEXT( ArrayProxy swapchains, ArrayProxy metadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void uninitializePerformanceApiINTEL(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const 
VULKAN_HPP_NOEXCEPT; - - template - Result unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy objectEntryTypes, ArrayProxy objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateDescriptorSets( ArrayProxy descriptorWrites, ArrayProxy descriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result waitForFences( ArrayProxy fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT - { - return m_device; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_device != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_device == VK_NULL_HANDLE; - } - - private: - VkDevice m_device; - }; - static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different 
size!" ); - - template <> - struct cpp_type - { - using type = Device; - }; - - class DisplayModeKHR - { - public: - using CType = VkDisplayModeKHR; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayModeKHR; - - public: - VULKAN_HPP_CONSTEXPR DisplayModeKHR() VULKAN_HPP_NOEXCEPT - : m_displayModeKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_displayModeKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT - : m_displayModeKHR( displayModeKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) VULKAN_HPP_NOEXCEPT - { - m_displayModeKHR = displayModeKHR; - return *this; - } -#endif - - DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_displayModeKHR = VK_NULL_HANDLE; - return *this; - } - - bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR == rhs.m_displayModeKHR; - } - - bool operator!=(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR != rhs.m_displayModeKHR; - } - - bool operator<(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR < rhs.m_displayModeKHR; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR == VK_NULL_HANDLE; - } - - private: - VkDisplayModeKHR m_displayModeKHR; - }; - static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = DisplayModeKHR; - }; - -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDevice = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ - - class PhysicalDevice - { - public: - using CType = VkPhysicalDevice; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePhysicalDevice; - - public: - VULKAN_HPP_CONSTEXPR PhysicalDevice() VULKAN_HPP_NOEXCEPT - : m_physicalDevice(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_physicalDevice(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT - : m_physicalDevice( physicalDevice ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) VULKAN_HPP_NOEXCEPT - { - m_physicalDevice = physicalDevice; - return *this; - } -#endif - - PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_physicalDevice = VK_NULL_HANDLE; - return *this; - } - - bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice == rhs.m_physicalDevice; - } - - bool operator!=(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice != rhs.m_physicalDevice; - } - - bool operator<(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice < rhs.m_physicalDevice; - } - -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - template - Result acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - - template - Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDevice( const DeviceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - 
template - Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateDeviceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy counters, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy counters, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getCooperativeMatrixPropertiesNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename 
ResultValueType>::type getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPlaneProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPlanePropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getExternalBufferPropertiesKHR( const 
VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const 
VULKAN_HPP_NOEXCEPT; - template - StructureChain getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getGeneratedCommandsPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX* pFeatures, VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template - typename ResultValueType>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template - typename ResultValueType>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getProperties2KHR( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - template - StructureChain getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint32_t getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template - typename ResultValueType>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getToolPropertiesEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - template - Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_XCB_KHR - template - Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#ifdef VK_USE_PLATFORM_XLIB_KHR - template - Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - template - Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice == VK_NULL_HANDLE; - } - - private: - VkPhysicalDevice m_physicalDevice; - }; - static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = PhysicalDevice; - }; - -#ifndef VULKAN_HPP_NO_SMART_HANDLE - class Instance; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDebugReportCallbackEXT = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDebugUtilsMessengerEXT = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSurfaceKHR = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ - - class Instance - { - public: - using CType = VkInstance; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eInstance; - - public: - VULKAN_HPP_CONSTEXPR Instance() VULKAN_HPP_NOEXCEPT - : m_instance(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_instance(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT - : m_instance( instance ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Instance & operator=(VkInstance instance) VULKAN_HPP_NOEXCEPT - { - m_instance = instance; - return *this; - } -#endif - - Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_instance = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_instance == rhs.m_instance; - } - - bool operator!=(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_instance != rhs.m_instance; - } - - bool operator<(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_instance < rhs.m_instance; - } - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template - Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - template - Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - 
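The PhysicalDevice and Instance wrappers above declare each entry point twice: a raw pointer-based form and, unless VULKAN_HPP_DISABLE_ENHANCED_MODE is defined, an enhanced overload that returns its result by value. A minimal usage sketch, not part of this patch and with illustrative names only (appInfo, the chosen API version), of instance creation and device selection through these wrappers:

    #include <vulkan/vulkan.hpp>

    // Enhanced-mode free function declared further down in this header; the
    // UniqueInstance smart handle (available when VULKAN_HPP_NO_SMART_HANDLE is
    // not defined) destroys the instance automatically.
    vk::ApplicationInfo appInfo( "example", 1, "no-engine", 1, VK_API_VERSION_1_1 );
    vk::UniqueInstance instance = vk::createInstanceUnique( vk::InstanceCreateInfo( {}, &appInfo ) );

    // The enhanced overloads of the PhysicalDevice getters shown earlier return
    // vectors and structs directly instead of filling caller-provided pointers.
    for ( vk::PhysicalDevice const & gpu : instance->enumeratePhysicalDevices() )
    {
      vk::PhysicalDeviceProperties props = gpu.getProperties();
      std::vector<vk::QueueFamilyProperties> queueFamilies = gpu.getQueueFamilyProperties();
      // choose a device and a queue family from props / queueFamilies here
    }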
template - Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_IOS_MVK - template - Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - -#ifdef VK_USE_PLATFORM_FUCHSIA - template - Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#ifdef VK_USE_PLATFORM_MACOS_MVK - template - Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - -#ifdef VK_USE_PLATFORM_METAL_EXT - template - Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - -#ifdef VK_USE_PLATFORM_GGP - template - Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER 
) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_GGP*/ - -#ifdef VK_USE_PLATFORM_VI_NN - template - Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_VI_NN*/ - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - template - Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_XCB_KHR - template - Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#ifdef VK_USE_PLATFORM_XLIB_KHR - template - Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - - template - void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const 
VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumeratePhysicalDeviceGroups(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Allocator 
const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumeratePhysicalDevices(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT - { - return m_instance; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_instance != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_instance == VK_NULL_HANDLE; - } - - private: - VkInstance m_instance; - }; - static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" 
); - - template <> - struct cpp_type - { - using type = Instance; - }; - -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueInstance = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ - - template - Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type enumerateInstanceVersion(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - struct GeometryTrianglesNV { - VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, - uint32_t vertexCount_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, - VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, - uint32_t indexCount_ = {}, - VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, - VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {} ) 
VULKAN_HPP_NOEXCEPT - : vertexData( vertexData_ ) - , vertexOffset( vertexOffset_ ) - , vertexCount( vertexCount_ ) - , vertexStride( vertexStride_ ) - , vertexFormat( vertexFormat_ ) - , indexData( indexData_ ) - , indexOffset( indexOffset_ ) - , indexCount( indexCount_ ) - , indexType( indexType_ ) - , transformData( transformData_ ) - , transformOffset( transformOffset_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryTrianglesNV(VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, uint32_t vertexCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, uint32_t indexCount_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {}) VULKAN_HPP_NOEXCEPT + : vertexData( vertexData_ ), vertexOffset( vertexOffset_ ), vertexCount( vertexCount_ ), vertexStride( vertexStride_ ), vertexFormat( vertexFormat_ ), indexData( indexData_ ), indexOffset( indexOffset_ ), indexCount( indexCount_ ), indexType( indexType_ ), transformData( transformData_ ), transformOffset( transformOffset_ ) {} - VULKAN_HPP_NAMESPACE::GeometryTrianglesNV & operator=( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV ) - offsetof( GeometryTrianglesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - GeometryTrianglesNV& operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT + GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( GeometryTrianglesNV ) ); return *this; } @@ -19544,6 +15629,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkGeometryTrianglesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -19554,6 +15640,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( GeometryTrianglesNV const& ) const = default; +#else bool operator==( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -19575,6 +15665,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV; @@ -19590,36 +15683,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; VULKAN_HPP_NAMESPACE::Buffer transformData = {}; VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {}; + }; static_assert( sizeof( GeometryTrianglesNV ) == 
sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = GeometryTrianglesNV; + }; + struct GeometryAABBNV { - VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, - uint32_t numAABBs_ = {}, - uint32_t stride_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT - : aabbData( aabbData_ ) - , numAABBs( numAABBs_ ) - , stride( stride_ ) - , offset( offset_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryAABBNV(VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, uint32_t numAABBs_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT + : aabbData( aabbData_ ), numAABBs( numAABBs_ ), stride( stride_ ), offset( offset_ ) {} - VULKAN_HPP_NAMESPACE::GeometryAABBNV & operator=( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GeometryAABBNV ) - offsetof( GeometryAABBNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - GeometryAABBNV& operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT + GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( GeometryAABBNV ) ); return *this; } @@ -19653,6 +15754,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkGeometryAABBNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -19663,6 +15765,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( GeometryAABBNV const& ) const = default; +#else bool operator==( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -19677,6 +15783,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV; @@ -19685,41 +15794,59 @@ namespace VULKAN_HPP_NAMESPACE uint32_t numAABBs = {}; uint32_t stride = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + }; static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = GeometryAABBNV; + }; + struct GeometryDataNV { - VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, - VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT - : triangles( triangles_ ) - , aabbs( aabbs_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryDataNV(VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {}) VULKAN_HPP_NOEXCEPT + : triangles( triangles_ ), aabbs( aabbs_ ) {} + VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - GeometryDataNV& operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ ) VULKAN_HPP_NOEXCEPT + GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( GeometryDataNV ) ); + return *this; + } + + GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT { triangles = triangles_; return *this; } - GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ ) VULKAN_HPP_NOEXCEPT + GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT { aabbs = aabbs_; return *this; } + operator VkGeometryDataNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -19730,6 +15857,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( GeometryDataNV const& ) const = default; +#else bool operator==( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( triangles == rhs.triangles ) @@ -19740,38 +15871,45 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {}; VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {}; + }; static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct GeometryNV { - VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeNV::eTriangles, - VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, - VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT - : geometryType( geometryType_ ) - , geometry( geometry_ ) - , flags( flags_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryNV(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT + : geometryType( geometryType_ ), geometry( geometry_ ), flags( flags_ ) {} - VULKAN_HPP_NAMESPACE::GeometryNV & operator=( VULKAN_HPP_NAMESPACE::GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GeometryNV ) - offsetof( GeometryNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - GeometryNV& operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT + GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( GeometryNV ) ); return *this; } @@ -19781,24 +15919,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType_ ) VULKAN_HPP_NOEXCEPT + GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT { geometryType = geometryType_; return *this; } - GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ ) VULKAN_HPP_NOEXCEPT + GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT { geometry = geometry_; return *this; } - GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } + operator VkGeometryNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -19809,6 +15948,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( GeometryNV const& ) const = default; +#else bool operator==( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -19822,45 +15965,60 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeNV::eTriangles; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {}; - VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + }; 
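As a point of reference, a sketch, not part of the patch, of how the VK_NV_ray_tracing geometry structs above are typically filled through their generated setters; vertexBuffer, indexBuffer and the counts are assumed to exist and are purely illustrative:

    // Describe one triangle mesh; every set*() returns *this, so calls chain.
    vk::GeometryTrianglesNV triangles;
    triangles.setVertexData( vertexBuffer )            // assumed pre-existing vk::Buffer
             .setVertexCount( vertexCount )
             .setVertexStride( 3 * sizeof( float ) )
             .setVertexFormat( vk::Format::eR32G32B32Sfloat )
             .setIndexData( indexBuffer )              // assumed pre-existing vk::Buffer
             .setIndexCount( indexCount )
             .setIndexType( vk::IndexType::eUint32 );

    vk::GeometryDataNV geometryData;
    geometryData.setTriangles( triangles );

    // After this update GeometryNV carries the KHR-typed enum and flags
    // (GeometryTypeKHR / GeometryFlagsKHR) instead of the NV-suffixed aliases.
    vk::GeometryNV geometry;
    geometry.setGeometryType( vk::GeometryTypeKHR::eTriangles )
            .setGeometry( geometryData )
            .setFlags( vk::GeometryFlagBitsKHR::eOpaque );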
static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = GeometryNV; + }; + struct AccelerationStructureInfoNV { - VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eTopLevel, - VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, - uint32_t instanceCount_ = {}, - uint32_t geometryCount_ = {}, - const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , instanceCount( instanceCount_ ) - , geometryCount( geometryCount_ ) - , pGeometries( pGeometries_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, uint32_t instanceCount_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ = {}) VULKAN_HPP_NOEXCEPT + : type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( geometryCount_ ), pGeometries( pGeometries_ ) {} - VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV ) - offsetof( AccelerationStructureInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - AccelerationStructureInfoNV& operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_, uint32_t instanceCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_ ) + : type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( static_cast( geometries_.size() ) ), pGeometries( geometries_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureInfoNV ) ); return *this; } @@ -19900,6 +16058,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + AccelerationStructureInfoNV & setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( geometries_.size() ); + pGeometries = geometries_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + 
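The ArrayProxyNoTemporaries constructor and setGeometries overload added above let geometryCount and pGeometries be filled from a container in one call. A hedged sketch, not part of the patch, continuing from the GeometryNV example and assuming VK_NV_ray_tracing is enabled:

    // One-element container holding the geometry built in the previous sketch.
    std::array<vk::GeometryNV, 1> geometries = { geometry };

    vk::AccelerationStructureInfoNV asInfo;
    asInfo.setType( vk::AccelerationStructureTypeNV::eBottomLevel )
          .setFlags( vk::BuildAccelerationStructureFlagBitsNV::ePreferFastTrace )
          .setGeometries( geometries );   // new enhanced-mode setter: sets count and pointer

    vk::AccelerationStructureCreateInfoNV createInfo;
    createInfo.setInfo( asInfo );
    // The handle itself would then come from the Device wrapper declared elsewhere
    // in this header, e.g. device.createAccelerationStructureNVUnique( createInfo ).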
+ operator VkAccelerationStructureInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -19910,6 +16078,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureInfoNV const& ) const = default; +#else bool operator==( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -19925,41 +16097,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eTopLevel; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {}; VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {}; uint32_t instanceCount = {}; uint32_t geometryCount = {}; const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries = {}; + }; static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = AccelerationStructureInfoNV; + }; + struct AccelerationStructureCreateInfoNV { - VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {} ) VULKAN_HPP_NOEXCEPT - : compactedSize( compactedSize_ ) - , info( info_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {}) VULKAN_HPP_NOEXCEPT + : compactedSize( compactedSize_ ), info( info_ ) {} - VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV ) - offsetof( AccelerationStructureCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AccelerationStructureCreateInfoNV& operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureCreateInfoNV ) ); return *this; } @@ -19975,12 +16162,13 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ ) VULKAN_HPP_NOEXCEPT + 
AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT { info = info_; return *this; } + operator VkAccelerationStructureCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -19991,6 +16179,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureCreateInfoNV const& ) const = default; +#else bool operator==( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -20003,38 +16195,429 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV; const void* pNext = {}; VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {}; + }; static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct AccelerationStructureMemoryRequirementsInfoNV + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , accelerationStructure( accelerationStructure_ ) + using Type = AccelerationStructureCreateInfoNV; + }; + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureDeviceAddressInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT + : accelerationStructure( accelerationStructure_ ) {} - VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV ) - offsetof( AccelerationStructureMemoryRequirementsInfoNV, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureDeviceAddressInfoKHR ) ); + return *this; + } + + AccelerationStructureDeviceAddressInfoKHR & setPNext( const void* pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + + operator VkAccelerationStructureDeviceAddressInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const& ) const = default; +#else + bool operator==( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( accelerationStructure == rhs.accelerationStructure ); + } + + bool operator!=( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; + + }; + static_assert( sizeof( AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureDeviceAddressInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct TransformMatrixKHR + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR(std::array,3> const& matrix_ = {}) VULKAN_HPP_NOEXCEPT + : matrix( matrix_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( TransformMatrixKHR ) ); + return *this; + } + + TransformMatrixKHR & setMatrix( std::array,3> matrix_ ) VULKAN_HPP_NOEXCEPT + { + matrix = matrix_; + return *this; + } + + + operator VkTransformMatrixKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( TransformMatrixKHR const& ) const = default; +#else + bool operator==( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( matrix == rhs.matrix ); + } + + bool operator!=( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper2D matrix = {}; + + }; + static_assert( sizeof( TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using TransformMatrixNV = TransformMatrixKHR; + + struct AccelerationStructureInstanceKHR + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT + : transform( transform_ ), instanceCustomIndex( instanceCustomIndex_ ), mask( mask_ ), instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ), flags( flags_ ), accelerationStructureReference( accelerationStructureReference_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureInstanceKHR ) ); + return *this; + } + + AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceCustomIndex = instanceCustomIndex_; + return *this; + } + + AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT + { + mask = mask_; + return *this; + } + + AccelerationStructureInstanceKHR & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + { + instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; + return *this; + } + + AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = *reinterpret_cast(&flags_); + return *this; + } + + AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureReference = accelerationStructureReference_; + return *this; + } + + + operator VkAccelerationStructureInstanceKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureInstanceKHR const& ) const = default; +#else + bool operator==( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( transform == rhs.transform ) + && ( instanceCustomIndex == rhs.instanceCustomIndex ) + && ( mask == rhs.mask ) + && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) + && ( flags == rhs.flags ) + && ( accelerationStructureReference == rhs.accelerationStructureReference ); + } + + bool operator!=( AccelerationStructureInstanceKHR const& rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {}; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; + uint64_t accelerationStructureReference = {}; + + }; + static_assert( sizeof( AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureMemoryRequirementsInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ = VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT + : type( type_ ), buildType( buildType_ ), accelerationStructure( accelerationStructure_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoKHR( AccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMemoryRequirementsInfoKHR( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureMemoryRequirementsInfoKHR & operator=( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureMemoryRequirementsInfoKHR & operator=( AccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) ); + return *this; + } + + AccelerationStructureMemoryRequirementsInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureMemoryRequirementsInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + AccelerationStructureMemoryRequirementsInfoKHR & setBuildType( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ ) VULKAN_HPP_NOEXCEPT + { + buildType = buildType_; + return *this; + } + + AccelerationStructureMemoryRequirementsInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + + operator VkAccelerationStructureMemoryRequirementsInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMemoryRequirementsInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( 
AccelerationStructureMemoryRequirementsInfoKHR const& ) const = default; +#else + bool operator==( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( type == rhs.type ) + && ( buildType == rhs.buildType ) + && ( accelerationStructure == rhs.accelerationStructure ); + } + + bool operator!=( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject; + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType = VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; + + }; + static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureMemoryRequirementsInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct AccelerationStructureMemoryRequirementsInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT + : type( type_ ), accelerationStructure( accelerationStructure_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AccelerationStructureMemoryRequirementsInfoNV& operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureMemoryRequirementsInfoNV ) ); return *this; } @@ -20056,6 +16639,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkAccelerationStructureMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -20066,6 +16650,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const& ) const = default; +#else bool operator==( 
AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -20078,44 +16666,442 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject; + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; + }; static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct AcquireNextImageInfoKHR + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, - uint64_t timeout_ = {}, - VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT - : swapchain( swapchain_ ) - , timeout( timeout_ ) - , semaphore( semaphore_ ) - , fence( fence_ ) - , deviceMask( deviceMask_ ) + using Type = AccelerationStructureMemoryRequirementsInfoNV; + }; + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct AccelerationStructureVersionKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionKHR(const uint8_t* versionData_ = {}) VULKAN_HPP_NOEXCEPT + : versionData( versionData_ ) {} - VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & operator=( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionKHR( AccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureVersionKHR( VkAccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR ) - offsetof( AcquireNextImageInfoKHR, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + AccelerationStructureVersionKHR & operator=( VkAccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + AccelerationStructureVersionKHR & operator=( AccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AccelerationStructureVersionKHR ) ); + return *this; + } + + AccelerationStructureVersionKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AccelerationStructureVersionKHR & setVersionData( const uint8_t* versionData_ ) VULKAN_HPP_NOEXCEPT + { + versionData = versionData_; + return *this; + } + + + operator VkAccelerationStructureVersionKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureVersionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureVersionKHR const& ) 
const = default; +#else + bool operator==( AccelerationStructureVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( versionData == rhs.versionData ); + } + + bool operator!=( AccelerationStructureVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionKHR; + const void* pNext = {}; + const uint8_t* versionData = {}; + + }; + static_assert( sizeof( AccelerationStructureVersionKHR ) == sizeof( VkAccelerationStructureVersionKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureVersionKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class SwapchainKHR + { + public: + using CType = VkSwapchainKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + + public: + VULKAN_HPP_CONSTEXPR SwapchainKHR() VULKAN_HPP_NOEXCEPT + : m_swapchainKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_swapchainKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT + : m_swapchainKHR( swapchainKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = swapchainKHR; + return *this; + } +#endif + + SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SwapchainKHR const& ) const = default; +#else + bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == rhs.m_swapchainKHR; + } + + bool operator!=(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != rhs.m_swapchainKHR; + } + + bool operator<(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR < rhs.m_swapchainKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == VK_NULL_HANDLE; + } + + private: + VkSwapchainKHR m_swapchainKHR; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Semaphore + { + public: + using CType = VkSemaphore; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + + public: + VULKAN_HPP_CONSTEXPR Semaphore() VULKAN_HPP_NOEXCEPT + : m_semaphore(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_semaphore(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT + : m_semaphore( semaphore ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Semaphore & operator=(VkSemaphore semaphore) VULKAN_HPP_NOEXCEPT + { + m_semaphore = semaphore; + return *this; + } +#endif + + Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Semaphore const& ) const = default; +#else + bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == rhs.m_semaphore; + } + + bool operator!=(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != rhs.m_semaphore; + } + + bool operator<(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore < rhs.m_semaphore; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == VK_NULL_HANDLE; + } + + private: + VkSemaphore m_semaphore; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Fence + { + public: + using CType = VkFence; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + + public: + VULKAN_HPP_CONSTEXPR Fence() VULKAN_HPP_NOEXCEPT + : m_fence(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_fence(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT + : m_fence( fence ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Fence & operator=(VkFence fence) VULKAN_HPP_NOEXCEPT + { + m_fence = fence; + return *this; + } +#endif + + Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_fence = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Fence const& ) const = default; +#else + bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence == rhs.m_fence; + } + + bool operator!=(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence != rhs.m_fence; + } + + bool operator<(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence < rhs.m_fence; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT + { + return m_fence; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_fence != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_fence == VK_NULL_HANDLE; + } + + private: + VkFence m_fence; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct AcquireNextImageInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint64_t timeout_ = {}, VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::Fence fence_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT + : swapchain( swapchain_ ), timeout( timeout_ ), semaphore( semaphore_ ), fence( fence_ ), deviceMask( deviceMask_ ) + {} + + VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AcquireNextImageInfoKHR ) ); return *this; } @@ -20155,6 +17141,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkAcquireNextImageInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -20165,6 +17152,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AcquireNextImageInfoKHR const& ) const = default; +#else bool operator==( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -20180,6 +17171,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR; @@ -20189,32 +17183,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; VULKAN_HPP_NAMESPACE::Fence fence = {}; uint32_t deviceMask = {}; + }; static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = AcquireNextImageInfoKHR; + }; + struct AcquireProfilingLockInfoKHR { - VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, - uint64_t timeout_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , timeout( timeout_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR(VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), timeout( timeout_ ) {} - VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & operator=( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR ) - offsetof( AcquireProfilingLockInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AcquireProfilingLockInfoKHR& operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AcquireProfilingLockInfoKHR ) ); return *this; } @@ -20236,6 +17242,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkAcquireProfilingLockInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -20246,6 +17253,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AcquireProfilingLockInfoKHR const& ) const = default; +#else bool operator==( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -20258,40 +17269,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR; const void* pNext = {}; VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {}; uint64_t timeout = {}; + }; static_assert( sizeof( AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = AcquireProfilingLockInfoKHR; + }; + struct AllocationCallbacks { - VULKAN_HPP_CONSTEXPR AllocationCallbacks( void* pUserData_ = {}, - PFN_vkAllocationFunction pfnAllocation_ = {}, - PFN_vkReallocationFunction pfnReallocation_ = {}, - PFN_vkFreeFunction pfnFree_ = {}, - PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, - PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT - : pUserData( pUserData_ ) - , pfnAllocation( pfnAllocation_ ) - , pfnReallocation( pfnReallocation_ ) - , pfnFree( pfnFree_ ) - , pfnInternalAllocation( pfnInternalAllocation_ ) - , pfnInternalFree( pfnInternalFree_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AllocationCallbacks(void* pUserData_ = {}, PFN_vkAllocationFunction pfnAllocation_ = {}, PFN_vkReallocationFunction pfnReallocation_ = {}, PFN_vkFreeFunction pfnFree_ = {}, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, PFN_vkInternalFreeNotification pfnInternalFree_ = {}) VULKAN_HPP_NOEXCEPT + : pUserData( pUserData_ ), pfnAllocation( pfnAllocation_ ), pfnReallocation( pfnReallocation_ ), pfnFree( pfnFree_ ), pfnInternalAllocation( pfnInternalAllocation_ ), pfnInternalFree( pfnInternalFree_ ) {} + VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; + AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT + AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AllocationCallbacks ) ); return *this; } @@ -20331,6 +17354,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkAllocationCallbacks const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -20341,6 +17365,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AllocationCallbacks const& ) const = default; +#else bool operator==( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( pUserData == rhs.pUserData ) @@ -20355,6 +17383,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: void* pUserData = {}; @@ -20363,30 +17394,37 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkFreeFunction pfnFree = {}; PFN_vkInternalAllocationNotification pfnInternalAllocation = {}; PFN_vkInternalFreeNotification pfnInternalFree = {}; + }; static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ComponentMapping { - VULKAN_HPP_CONSTEXPR ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, - VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, - VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, - VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT - : r( r_ ) - , g( g_ ) - , b( b_ ) - , a( a_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComponentMapping(VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity) VULKAN_HPP_NOEXCEPT + : r( r_ ), g( g_ ), b( b_ ), a( a_ ) {} + VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ComponentMapping& operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT + ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ComponentMapping ) ); return *this; } @@ -20414,6 +17452,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkComponentMapping const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -20424,6 +17463,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ComponentMapping const& ) const = default; +#else bool operator==( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( r == rhs.r ) @@ -20436,55 +17479,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + }; static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidHardwareBufferFormatPropertiesANDROID { - AndroidHardwareBufferFormatPropertiesANDROID( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - uint64_t externalFormat_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, - VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, - VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, - VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, - VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, - VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT - : format( format_ ) - , externalFormat( externalFormat_ ) - , formatFeatures( formatFeatures_ ) - , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) - , suggestedYcbcrModel( suggestedYcbcrModel_ ) - , suggestedYcbcrRange( suggestedYcbcrRange_ ) - , suggestedXChromaOffset( suggestedXChromaOffset_ ) - , suggestedYChromaOffset( suggestedYChromaOffset_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven) VULKAN_HPP_NOEXCEPT + : format( format_ ), externalFormat( externalFormat_ ), formatFeatures( formatFeatures_ ), samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ), suggestedYcbcrModel( suggestedYcbcrModel_ ), suggestedYcbcrRange( suggestedYcbcrRange_ ), suggestedXChromaOffset( suggestedXChromaOffset_ ), suggestedYChromaOffset( suggestedYChromaOffset_ ) {} - VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID & operator=( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID ) - offsetof( AndroidHardwareBufferFormatPropertiesANDROID, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AndroidHardwareBufferFormatPropertiesANDROID& operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const 
& rhs ) VULKAN_HPP_NOEXCEPT + AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) ); + return *this; + } + + operator VkAndroidHardwareBufferFormatPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -20495,6 +17535,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const& ) const = default; +#else bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -20513,6 +17557,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; @@ -20525,38 +17572,50 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatPropertiesANDROID>
+  {
+    using Type = AndroidHardwareBufferFormatPropertiesANDROID;
+  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
  struct AndroidHardwareBufferPropertiesANDROID
  {
-    AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
-                                            uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
-      : allocationSize( allocationSize_ )
-      , memoryTypeBits( memoryTypeBits_ )
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+    : allocationSize( allocationSize_ ), memoryTypeBits( memoryTypeBits_ )
    {}

-    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & operator=( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID ) - offsetof( AndroidHardwareBufferPropertiesANDROID, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )

-    AndroidHardwareBufferPropertiesANDROID& operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>(&rhs);
+      *this = *reinterpret_cast<AndroidHardwareBufferPropertiesANDROID const *>( &rhs );
      return *this;
    }

+    AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AndroidHardwareBufferPropertiesANDROID ) );
+      return *this;
+    }
+
+
    operator VkAndroidHardwareBufferPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID*>( this );
@@ -20567,6 +17626,10 @@ namespace VULKAN_HPP_NAMESPACE
      return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( this );
    }

+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AndroidHardwareBufferPropertiesANDROID const& ) const = default;
+#else
    bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
@@ -20579,42 +17642,59 @@ namespace VULKAN_HPP_NAMESPACE
    {
      return !operator==( rhs );
    }
+#endif
+
+
  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
    uint32_t memoryTypeBits = {};
+
  };
  static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AndroidHardwareBufferPropertiesANDROID>::value, "struct wrapper is not a standard layout!"
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eAndroidHardwareBufferPropertiesANDROID>
+  {
+    using Type = AndroidHardwareBufferPropertiesANDROID;
+  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
  struct AndroidHardwareBufferUsageANDROID
  {
-    AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {} ) VULKAN_HPP_NOEXCEPT
-      : androidHardwareBufferUsage( androidHardwareBufferUsage_ )
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID(uint64_t androidHardwareBufferUsage_ = {}) VULKAN_HPP_NOEXCEPT
+    : androidHardwareBufferUsage( androidHardwareBufferUsage_ )
    {}

-    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID & operator=( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID ) - offsetof( AndroidHardwareBufferUsageANDROID, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )

-    AndroidHardwareBufferUsageANDROID& operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const *>(&rhs);
+      *this = *reinterpret_cast<AndroidHardwareBufferUsageANDROID const *>( &rhs );
      return *this;
    }

+    AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AndroidHardwareBufferUsageANDROID ) );
+      return *this;
+    }
+
+
    operator VkAndroidHardwareBufferUsageANDROID const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID*>( this );
@@ -20625,6 +17705,10 @@ namespace VULKAN_HPP_NAMESPACE
      return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID*>( this );
    }

+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AndroidHardwareBufferUsageANDROID const& ) const = default;
+#else
    bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
@@ -20636,40 +17720,54 @@ namespace VULKAN_HPP_NAMESPACE
    {
      return !operator==( rhs );
    }
+#endif
+
+
  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
    void* pNext = {};
    uint64_t androidHardwareBufferUsage = {};
+
  };
  static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AndroidHardwareBufferUsageANDROID>::value, "struct wrapper is not a standard layout!"
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eAndroidHardwareBufferUsageANDROID>
+  {
+    using Type = AndroidHardwareBufferUsageANDROID;
+  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
  struct AndroidSurfaceCreateInfoKHR
  {
-    VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {},
-                                                      struct ANativeWindow* window_ = {} ) VULKAN_HPP_NOEXCEPT
-      : flags( flags_ )
-      , window( window_ )
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, struct ANativeWindow* window_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), window( window_ )
    {}

-    VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR ) - offsetof( AndroidSurfaceCreateInfoKHR, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )

-    AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const *>(&rhs);
+      *this = *reinterpret_cast<AndroidSurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

+    AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) );
+      return *this;
+    }
@@ -20691,6 +17789,7 @@ namespace VULKAN_HPP_NAMESPACE
      return *this;
    }

+
    operator VkAndroidSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( this );
@@ -20701,6 +17800,10 @@ namespace VULKAN_HPP_NAMESPACE
      return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR*>( this );
    }

+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AndroidSurfaceCreateInfoKHR const& ) const = default;
+#else
    bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
@@ -20713,45 +17816,54 @@ namespace VULKAN_HPP_NAMESPACE
    {
      return !operator==( rhs );
    }
+#endif
+
+
  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {};
    struct ANativeWindow* window = {};
+
  };
  static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AndroidSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = AndroidSurfaceCreateInfoKHR; + }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ struct ApplicationInfo { - VULKAN_HPP_CONSTEXPR ApplicationInfo( const char* pApplicationName_ = {}, - uint32_t applicationVersion_ = {}, - const char* pEngineName_ = {}, - uint32_t engineVersion_ = {}, - uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT - : pApplicationName( pApplicationName_ ) - , applicationVersion( applicationVersion_ ) - , pEngineName( pEngineName_ ) - , engineVersion( engineVersion_ ) - , apiVersion( apiVersion_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ApplicationInfo(const char* pApplicationName_ = {}, uint32_t applicationVersion_ = {}, const char* pEngineName_ = {}, uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {}) VULKAN_HPP_NOEXCEPT + : pApplicationName( pApplicationName_ ), applicationVersion( applicationVersion_ ), pEngineName( pEngineName_ ), engineVersion( engineVersion_ ), apiVersion( apiVersion_ ) {} - VULKAN_HPP_NAMESPACE::ApplicationInfo & operator=( VULKAN_HPP_NAMESPACE::ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ApplicationInfo ) - offsetof( ApplicationInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ApplicationInfo& operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ApplicationInfo ) ); return *this; } @@ -20791,6 +17903,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkApplicationInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -20801,6 +17914,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ApplicationInfo const& ) const = default; +#else bool operator==( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -20816,6 +17933,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo; @@ -20825,40 +17945,43 @@ namespace VULKAN_HPP_NAMESPACE const char* pEngineName = {}; uint32_t engineVersion = {}; uint32_t apiVersion = {}; + }; static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
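// Illustrative sketch (not part of the generated header): the rewritten ApplicationInfo
// wrapper above keeps its convenience constructor unless VULKAN_HPP_NO_STRUCT_CONSTRUCTORS
// is defined, so typical client code still looks like the following. The name and version
// literals are placeholder assumptions, not values taken from this diff.
//
//   VULKAN_HPP_NAMESPACE::ApplicationInfo app_info(
//       "MyApp",                     // pApplicationName
//       VK_MAKE_VERSION( 1, 0, 0 ),  // applicationVersion
//       "MyEngine",                  // pEngineName
//       VK_MAKE_VERSION( 1, 0, 0 ),  // engineVersion
//       VK_API_VERSION_1_0 );        // apiVersion
//
// With VULKAN_HPP_NO_STRUCT_CONSTRUCTORS defined, the same struct is filled by assigning its
// members (or via the chained set*() calls) instead of through this constructor.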
); + template <> + struct CppType + { + using Type = ApplicationInfo; + }; + struct AttachmentDescription { - VULKAN_HPP_CONSTEXPR AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , format( format_ ) - , samples( samples_ ) - , loadOp( loadOp_ ) - , storeOp( storeOp_ ) - , stencilLoadOp( stencilLoadOp_ ) - , stencilStoreOp( stencilStoreOp_ ) - , initialLayout( initialLayout_ ) - , finalLayout( finalLayout_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescription(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), format( format_ ), samples( samples_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), stencilLoadOp( stencilLoadOp_ ), stencilStoreOp( stencilStoreOp_ ), initialLayout( initialLayout_ ), finalLayout( finalLayout_ ) {} + VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AttachmentDescription& operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AttachmentDescription ) ); return *this; } @@ -20916,6 +18039,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkAttachmentDescription const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -20926,6 +18050,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto 
operator<=>( AttachmentDescription const& ) const = default; +#else bool operator==( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( flags == rhs.flags ) @@ -20943,6 +18071,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; @@ -20954,46 +18085,38 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct AttachmentDescription2 { - VULKAN_HPP_CONSTEXPR AttachmentDescription2( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , format( format_ ) - , samples( samples_ ) - , loadOp( loadOp_ ) - , storeOp( storeOp_ ) - , stencilLoadOp( stencilLoadOp_ ) - , stencilStoreOp( stencilStoreOp_ ) - , initialLayout( initialLayout_ ) - , finalLayout( finalLayout_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescription2(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), format( format_ ), samples( samples_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), stencilLoadOp( stencilLoadOp_ ), stencilStoreOp( stencilStoreOp_ ), initialLayout( initialLayout_ ), finalLayout( finalLayout_ ) {} - VULKAN_HPP_NAMESPACE::AttachmentDescription2 & operator=( 
VULKAN_HPP_NAMESPACE::AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription2 ) - offsetof( AttachmentDescription2, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AttachmentDescription2& operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AttachmentDescription2 ) ); return *this; } @@ -21057,6 +18180,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkAttachmentDescription2 const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -21067,6 +18191,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AttachmentDescription2 const& ) const = default; +#else bool operator==( AttachmentDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -21086,6 +18214,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2; @@ -21099,32 +18230,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; static_assert( sizeof( AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = AttachmentDescription2; + }; + using AttachmentDescription2KHR = AttachmentDescription2; + struct AttachmentDescriptionStencilLayout { - VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : stencilInitialLayout( stencilInitialLayout_ ) - , stencilFinalLayout( stencilFinalLayout_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT + : stencilInitialLayout( stencilInitialLayout_ ), stencilFinalLayout( stencilFinalLayout_ ) {} - VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout & operator=( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout ) - offsetof( AttachmentDescriptionStencilLayout, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AttachmentDescriptionStencilLayout& operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AttachmentDescriptionStencilLayout ) ); return *this; } @@ -21146,6 +18290,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkAttachmentDescriptionStencilLayout const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -21156,6 +18301,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AttachmentDescriptionStencilLayout const& ) const = default; +#else bool operator==( AttachmentDescriptionStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -21168,32 +18317,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout; void* pNext = {}; VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; static_assert( sizeof( AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), "struct and wrapper have different size!" 
); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = AttachmentDescriptionStencilLayout; + }; + using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; + struct AttachmentReference { - VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : attachment( attachment_ ) - , layout( layout_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentReference(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT + : attachment( attachment_ ), layout( layout_ ) {} + VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; + AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AttachmentReference& operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AttachmentReference ) ); return *this; } @@ -21209,6 +18379,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkAttachmentReference const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -21219,6 +18390,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AttachmentReference const& ) const = default; +#else bool operator==( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( attachment == rhs.attachment ) @@ -21229,38 +18404,45 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t attachment = {}; VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct AttachmentReference2 { - VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t attachment_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT - : attachment( attachment_ ) - , layout( layout_ ) - , aspectMask( aspectMask_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentReference2(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT + : attachment( attachment_ ), layout( layout_ ), aspectMask( aspectMask_ ) {} - VULKAN_HPP_NAMESPACE::AttachmentReference2 & operator=( VULKAN_HPP_NAMESPACE::AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference2 ) - offsetof( AttachmentReference2, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AttachmentReference2& operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AttachmentReference2 ) ); return *this; } @@ -21288,6 +18470,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkAttachmentReference2 const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -21298,6 +18481,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AttachmentReference2 const& ) const = default; +#else bool operator==( AttachmentReference2 const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -21311,6 +18498,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2; @@ -21318,30 +18508,45 @@ namespace VULKAN_HPP_NAMESPACE uint32_t attachment = {}; VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + }; static_assert( sizeof( AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = AttachmentReference2; + }; + using AttachmentReference2KHR = AttachmentReference2; + struct AttachmentReferenceStencilLayout { - VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : stencilLayout( stencilLayout_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT + : stencilLayout( stencilLayout_ ) {} - VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout & operator=( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout ) - offsetof( AttachmentReferenceStencilLayout, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AttachmentReferenceStencilLayout& operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AttachmentReferenceStencilLayout ) ); return *this; } @@ -21357,6 +18562,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkAttachmentReferenceStencilLayout const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -21367,6 +18573,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AttachmentReferenceStencilLayout const& ) const = default; +#else bool operator==( AttachmentReferenceStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -21378,31 +18588,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout; void* pNext = {}; VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; static_assert( sizeof( AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = AttachmentReferenceStencilLayout; + }; + using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout; + struct Extent2D { - VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, - uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT - : width( width_ ) - , height( height_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Extent2D(uint32_t width_ = {}, uint32_t height_ = {}) VULKAN_HPP_NOEXCEPT + : width( width_ ), height( height_ ) {} + VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - Extent2D& operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT + Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( Extent2D ) ); return *this; } @@ -21418,6 +18649,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkExtent2D const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -21428,6 +18660,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Extent2D const& ) const = default; +#else bool operator==( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( width == rhs.width ) @@ -21438,30 +18674,44 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t width = {}; uint32_t height = {}; + }; static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
 );

  struct SampleLocationEXT
  {
-    VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {},
-                                            float y_ = {} ) VULKAN_HPP_NOEXCEPT
-      : x( x_ )
-      , y( y_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SampleLocationEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ )
     {}
+    VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
     SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
       *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )

-    SampleLocationEXT& operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>(&rhs);
+      *this = *reinterpret_cast<SampleLocationEXT const *>( &rhs );
+      return *this;
+    }
+
+    SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SampleLocationEXT ) );
       return *this;
     }

@@ -21477,6 +18727,7 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
+
     operator VkSampleLocationEXT const&() const VULKAN_HPP_NOEXCEPT
     {
       return *reinterpret_cast<const VkSampleLocationEXT*>( this );
@@ -21487,6 +18738,10 @@ namespace VULKAN_HPP_NAMESPACE
       return *reinterpret_cast<VkSampleLocationEXT*>( this );
     }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SampleLocationEXT const& ) const = default;
+#else
     bool operator==( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( x == rhs.x )
@@ -21497,40 +18752,51 @@ namespace VULKAN_HPP_NAMESPACE
     {
       return !operator==( rhs );
     }
+#endif
+
+
  public:
     float x = {};
     float y = {};
+
  };
  static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SampleLocationEXT>::value, "struct wrapper is not a standard layout!"
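// Illustrative note (not part of the generated header): when VULKAN_HPP_HAS_SPACESHIP_OPERATOR
// is defined (C++20), the wrappers above default operator<=>; otherwise the generated
// member-wise operator==/operator!= is used. Either way the structs stay layout-compatible
// with their C counterparts, e.g.:
//
//   VULKAN_HPP_NAMESPACE::SampleLocationEXT a( 0.5f, 0.5f );
//   VULKAN_HPP_NAMESPACE::SampleLocationEXT b( 0.5f, 0.5f );
//   assert( a == b );                                         // member-wise comparison
//   assert( sizeof( a ) == sizeof( VkSampleLocationEXT ) );   // checked by the static_asserts above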
); struct SampleLocationsInfoEXT { - VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, - uint32_t sampleLocationsCount_ = {}, - const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT - : sampleLocationsPerPixel( sampleLocationsPerPixel_ ) - , sampleLocationGridSize( sampleLocationGridSize_ ) - , sampleLocationsCount( sampleLocationsCount_ ) - , pSampleLocations( pSampleLocations_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, uint32_t sampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT + : sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( sampleLocationsCount_ ), pSampleLocations( pSampleLocations_ ) {} - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT ) - offsetof( SampleLocationsInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) + : sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( static_cast( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SampleLocationsInfoEXT ) ); return *this; } @@ -21546,7 +18812,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT + SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT { sampleLocationGridSize = sampleLocationGridSize_; return *this; @@ -21564,6 +18830,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SampleLocationsInfoEXT & setSampleLocations( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsCount = static_cast( sampleLocations_.size() ); + pSampleLocations = sampleLocations_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkSampleLocationsInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -21574,6 +18850,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SampleLocationsInfoEXT const& ) const = default; +#else bool operator==( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -21588,6 +18868,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT; @@ -21596,26 +18879,43 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {}; uint32_t sampleLocationsCount = {}; const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations = {}; + }; static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = SampleLocationsInfoEXT; + }; + struct AttachmentSampleLocationsEXT { - VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {}, - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT - : attachmentIndex( attachmentIndex_ ) - , sampleLocationsInfo( sampleLocationsInfo_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT(uint32_t attachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT + : attachmentIndex( attachmentIndex_ ), sampleLocationsInfo( sampleLocationsInfo_ ) {} + VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - AttachmentSampleLocationsEXT& operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( AttachmentSampleLocationsEXT ) ); return *this; } @@ -21625,12 +18925,13 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsInfo = sampleLocationsInfo_; return *this; } + operator VkAttachmentSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -21641,6 +18942,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AttachmentSampleLocationsEXT const& ) const = default; +#else bool operator==( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( attachmentIndex == rhs.attachmentIndex ) @@ -21651,27 +18956,44 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t attachmentIndex = {}; VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct BaseInStructure { - BaseInStructure() VULKAN_HPP_NOEXCEPT + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + BaseInStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo) VULKAN_HPP_NOEXCEPT + : sType( sType_ ) {} + BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BaseInStructure& operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT + BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BaseInStructure ) ); return *this; } @@ -21681,6 +19003,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBaseInStructure const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -21691,6 +19014,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BaseInStructure const& ) const = default; +#else bool operator==( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -21701,27 +19028,44 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: - VULKAN_HPP_NAMESPACE::StructureType sType = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo; const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext = {}; + }; static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct BaseOutStructure { - BaseOutStructure() VULKAN_HPP_NOEXCEPT + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + BaseOutStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo) VULKAN_HPP_NOEXCEPT + : sType( sType_ ) {} + BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BaseOutStructure& operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT + BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BaseOutStructure ) ); return *this; } @@ -21731,6 +19075,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBaseOutStructure const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -21741,6 +19086,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BaseOutStructure const& ) const = default; +#else bool operator==( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -21751,92 +19100,215 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: - VULKAN_HPP_NAMESPACE::StructureType sType = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo; struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext = {}; + }; static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
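// Illustrative sketch (not part of the generated header): BaseInStructure/BaseOutStructure,
// which now default-initialise sType to eApplicationInfo instead of zero, are the generic
// headers used to walk a pNext chain. A minimal traversal, assuming the chain contains only
// spec-conformant extensible structs:
//
//   void dump_chain( VULKAN_HPP_NAMESPACE::BaseOutStructure * node )
//   {
//     while ( node != nullptr )
//     {
//       // inspect node->sType here (e.g. log it)
//       node = node->pNext;
//     }
//   }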
); - struct BindAccelerationStructureMemoryInfoNV + class DeviceMemory { - VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, - uint32_t deviceIndexCount_ = {}, - const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT - : accelerationStructure( accelerationStructure_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , deviceIndexCount( deviceIndexCount_ ) - , pDeviceIndices( pDeviceIndices_ ) + public: + using CType = VkDeviceMemory; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; + + public: + VULKAN_HPP_CONSTEXPR DeviceMemory() VULKAN_HPP_NOEXCEPT + : m_deviceMemory(VK_NULL_HANDLE) {} - VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV & operator=( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_deviceMemory(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT + : m_deviceMemory( deviceMemory ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DeviceMemory & operator=(VkDeviceMemory deviceMemory) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV ) - offsetof( BindAccelerationStructureMemoryInfoNV, pNext ) ); + m_deviceMemory = deviceMemory; + return *this; + } +#endif + + DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_deviceMemory = VK_NULL_HANDLE; return *this; } - BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceMemory const& ) const = default; +#else + bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory == rhs.m_deviceMemory; + } + + bool operator!=(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory != rhs.m_deviceMemory; + } + + bool operator<(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory < rhs.m_deviceMemory; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory == VK_NULL_HANDLE; + } + + private: + VkDeviceMemory m_deviceMemory; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct BindAccelerationStructureMemoryInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, uint32_t deviceIndexCount_ = {}, const uint32_t* pDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT + : accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoKHR( BindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindAccelerationStructureMemoryInfoKHR( VkBindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - BindAccelerationStructureMemoryInfoNV& operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindAccelerationStructureMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_, VULKAN_HPP_NAMESPACE::DeviceMemory memory_, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) + : accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( static_cast( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + BindAccelerationStructureMemoryInfoKHR & operator=( VkBindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindAccelerationStructureMemoryInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & operator=( BindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BindAccelerationStructureMemoryInfoKHR ) ); + return *this; + } + + BindAccelerationStructureMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } - BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { memory = 
memory_; return *this; } - BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } - BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT { deviceIndexCount = deviceIndexCount_; return *this; } - BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoKHR & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } - operator VkBindAccelerationStructureMemoryInfoNV const&() const VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindAccelerationStructureMemoryInfoKHR & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkBindAccelerationStructureMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } - operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkBindAccelerationStructureMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - bool operator==( BindAccelerationStructureMemoryInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindAccelerationStructureMemoryInfoKHR const& ) const = default; +#else + bool operator==( BindAccelerationStructureMemoryInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) @@ -21847,45 +19319,67 @@ namespace VULKAN_HPP_NAMESPACE && ( pDeviceIndices == rhs.pDeviceIndices ); } - bool operator!=( BindAccelerationStructureMemoryInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindAccelerationStructureMemoryInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoKHR; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; uint32_t deviceIndexCount = {}; const uint32_t* pDeviceIndices = {}; + }; - static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BindAccelerationStructureMemoryInfoKHR ) == sizeof( VkBindAccelerationStructureMemoryInfoKHR ), "struct and wrapper have different size!" 
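// Illustrative sketch (not part of the generated header): the enhanced-mode constructor and
// setDeviceIndices() added above take an ArrayProxyNoTemporaries<const uint32_t>, so a named
// container can be passed instead of a count/pointer pair. The handles and vector below are
// assumptions for illustration only.
//
//   std::vector<uint32_t> device_indices = { 0 };
//   VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR bind_info;
//   bind_info.setAccelerationStructure( acceleration_structure )  // assumed existing handle
//            .setMemory( memory )                                 // assumed existing allocation
//            .setMemoryOffset( 0 )
//            .setDeviceIndices( device_indices );  // fills deviceIndexCount and pDeviceIndices
//
// Temporaries are rejected at compile time (hence "NoTemporaries"), because the struct only
// stores a pointer into the caller's array.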
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = BindAccelerationStructureMemoryInfoKHR; + }; + using BindAccelerationStructureMemoryInfoNV = BindAccelerationStructureMemoryInfoKHR; struct BindBufferMemoryDeviceGroupInfo { - VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, - const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceIndexCount( deviceIndexCount_ ) - , pDeviceIndices( pDeviceIndices_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t* pDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT + : deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ ) {} - VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo & operator=( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo ) - offsetof( BindBufferMemoryDeviceGroupInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindBufferMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) + : deviceIndexCount( static_cast( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BindBufferMemoryDeviceGroupInfo ) ); return *this; } @@ -21907,6 +19401,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindBufferMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkBindBufferMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -21917,6 +19421,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindBufferMemoryDeviceGroupInfo const& ) const = default; +#else bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -21929,40 +19437,54 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType 
sType = StructureType::eBindBufferMemoryDeviceGroupInfo; const void* pNext = {}; uint32_t deviceIndexCount = {}; const uint32_t* pDeviceIndices = {}; + }; static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = BindBufferMemoryDeviceGroupInfo; + }; + using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; + struct BindBufferMemoryInfo { - VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ), memory( memory_ ), memoryOffset( memoryOffset_ ) {} - VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo & operator=( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo ) - offsetof( BindBufferMemoryInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BindBufferMemoryInfo ) ); return *this; } @@ -21990,6 +19512,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBindBufferMemoryInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22000,6 +19523,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindBufferMemoryInfo const& ) const = default; +#else bool operator==( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -22013,6 +19540,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo; @@ -22020,26 +19550,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + }; static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = BindBufferMemoryInfo; + }; + using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; + struct Offset2D { - VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, - int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Offset2D(int32_t x_ = {}, int32_t y_ = {}) VULKAN_HPP_NOEXCEPT + : x( x_ ), y( y_ ) {} + VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - Offset2D& operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT + Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( Offset2D ) ); return *this; } @@ -22055,6 +19603,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkOffset2D const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22065,6 +19614,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Offset2D const& ) const = default; +#else bool operator==( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( x == rhs.x ) @@ -22075,45 +19628,60 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: int32_t x = {}; int32_t y = {}; + }; static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct Rect2D { - VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT - : offset( offset_ ) - , extent( extent_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Rect2D(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}) VULKAN_HPP_NOEXCEPT + : offset( offset_ ), extent( extent_ ) {} + VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - Rect2D& operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT + Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT + Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( Rect2D ) ); + return *this; + } + + Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } - Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT + Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } + operator VkRect2D const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22124,6 +19692,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Rect2D const& ) const = default; +#else bool operator==( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( offset == rhs.offset ) @@ -22134,40 +19706,51 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::Offset2D offset = {}; VULKAN_HPP_NAMESPACE::Extent2D extent = {}; + }; static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct BindImageMemoryDeviceGroupInfo { - VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, - const uint32_t* pDeviceIndices_ = {}, - uint32_t splitInstanceBindRegionCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceIndexCount( deviceIndexCount_ ) - , pDeviceIndices( pDeviceIndices_ ) - , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ) - , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t* pDeviceIndices_ = {}, uint32_t splitInstanceBindRegionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ = {}) VULKAN_HPP_NOEXCEPT + : deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ ), splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ), pSplitInstanceBindRegions( pSplitInstanceBindRegions_ ) {} - VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo & operator=( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo ) - offsetof( BindImageMemoryDeviceGroupInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindImageMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & splitInstanceBindRegions_ = {} ) + : deviceIndexCount( static_cast( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ), splitInstanceBindRegionCount( static_cast( splitInstanceBindRegions_.size() ) ), pSplitInstanceBindRegions( splitInstanceBindRegions_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BindImageMemoryDeviceGroupInfo ) ); return *this; } @@ -22189,6 +19772,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindImageMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT { splitInstanceBindRegionCount = splitInstanceBindRegionCount_; @@ -22201,6 
+19793,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT + { + splitInstanceBindRegionCount = static_cast( splitInstanceBindRegions_.size() ); + pSplitInstanceBindRegions = splitInstanceBindRegions_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkBindImageMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22211,6 +19813,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindImageMemoryDeviceGroupInfo const& ) const = default; +#else bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -22225,6 +19831,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; @@ -22233,34 +19842,145 @@ namespace VULKAN_HPP_NAMESPACE const uint32_t* pDeviceIndices = {}; uint32_t splitInstanceBindRegionCount = {}; const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions = {}; + }; static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct BindImageMemoryInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) + using Type = BindImageMemoryDeviceGroupInfo; + }; + using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; + + class Image + { + public: + using CType = VkImage; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; + + public: + VULKAN_HPP_CONSTEXPR Image() VULKAN_HPP_NOEXCEPT + : m_image(VK_NULL_HANDLE) {} - VULKAN_HPP_NAMESPACE::BindImageMemoryInfo & operator=( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_image(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT + : m_image( image ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Image & operator=(VkImage image) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo ) - offsetof( BindImageMemoryInfo, pNext ) ); + m_image = image; return *this; } +#endif + + Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_image = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Image const& ) const = default; +#else + bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image == rhs.m_image; + } + + bool operator!=(Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
m_image != rhs.m_image; + } + + bool operator<(Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image < rhs.m_image; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT + { + return m_image; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_image != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_image == VK_NULL_HANDLE; + } + + private: + VkImage m_image; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Image; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Image; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Image; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct BindImageMemoryInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}) VULKAN_HPP_NOEXCEPT + : image( image_ ), memory( memory_ ), memoryOffset( memoryOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BindImageMemoryInfo ) ); return *this; } @@ -22288,6 +20008,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBindImageMemoryInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22298,6 +20019,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindImageMemoryInfo const& ) const = default; +#else bool operator==( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -22311,6 +20036,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo; @@ -22318,32 +20046,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Image image = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + }; static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = BindImageMemoryInfo; + }; + using BindImageMemoryInfoKHR = BindImageMemoryInfo; + struct BindImageMemorySwapchainInfoKHR { - VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, - uint32_t imageIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : swapchain( swapchain_ ) - , imageIndex( imageIndex_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {}) VULKAN_HPP_NOEXCEPT + : swapchain( swapchain_ ), imageIndex( imageIndex_ ) {} - VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR & operator=( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR ) - offsetof( BindImageMemorySwapchainInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BindImageMemorySwapchainInfoKHR ) ); return *this; } @@ -22365,6 +20106,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBindImageMemorySwapchainInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22375,6 +20117,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindImageMemorySwapchainInfoKHR const& ) const = default; +#else bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -22387,36 +20133,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; const void* pNext = {}; VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; uint32_t imageIndex = {}; + }; static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = BindImageMemorySwapchainInfoKHR; + }; + struct BindImagePlaneMemoryInfo { - VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT - : planeAspect( planeAspect_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor) VULKAN_HPP_NOEXCEPT + : planeAspect( planeAspect_ ) {} - VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo & operator=( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo ) - offsetof( BindImagePlaneMemoryInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BindImagePlaneMemoryInfo ) ); return *this; } @@ -22432,6 +20195,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBindImagePlaneMemoryInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22442,6 +20206,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindImagePlaneMemoryInfo const& ) const = default; +#else bool operator==( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -22453,37 +20221,208 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo; const void* pNext = {}; VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + }; static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);

+  template <>
+  struct CppType<StructureType, StructureType::eBindImagePlaneMemoryInfo>
+  {
+    using Type = BindImagePlaneMemoryInfo;
+  };
+  using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
+
+  struct BindIndexBufferIndirectCommandNV
+  {
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16) VULKAN_HPP_NOEXCEPT
+      : bufferAddress( bufferAddress_ ), size( size_ ), indexType( indexType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+    BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindIndexBufferIndirectCommandNV ) );
+      return *this;
+    }
+
+    BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferAddress = bufferAddress_;
+      return *this;
+    }
+
+    BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexType = indexType_;
+      return *this;
+    }
+
+    operator VkBindIndexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV*>( this );
+    }
+
+    operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindIndexBufferIndirectCommandNV const& ) const = default;
+#else
+    bool operator==( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( bufferAddress == rhs.bufferAddress )
+          && ( size == rhs.size )
+          && ( indexType == rhs.indexType );
+    }
+
+    bool operator!=( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
+    uint32_t size = {};
+    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+
+  };
+  static_assert( sizeof( BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindIndexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct BindShaderGroupIndirectCommandNV
+  {
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV(uint32_t groupIndex_ = {}) VULKAN_HPP_NOEXCEPT
+      : groupIndex( groupIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+    BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindShaderGroupIndirectCommandNV ) );
+      return *this;
+    }
+
+    BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupIndex = groupIndex_;
+      return *this;
+    }
+
+    operator VkBindShaderGroupIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV*>( this );
+    }
+
+    operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindShaderGroupIndirectCommandNV const& ) const = default;
+#else
+    bool operator==( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( groupIndex == rhs.groupIndex );
+    }
+
+    bool operator!=( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t groupIndex = {};
+
+  };
+  static_assert( sizeof( BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindShaderGroupIndirectCommandNV>::value, "struct wrapper is not a standard layout!"
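// Editor's note (illustrative sketch, not part of the patch): the generated setters on the new
// NV indirect-command structs return *this, so fields can be filled fluently; the names
// bufferAddress and indexDataSize below are placeholders:
//
//   vk::BindIndexBufferIndirectCommandNV indexBind{};
//   indexBind.setBufferAddress( bufferAddress )
//            .setSize( indexDataSize )
//            .setIndexType( vk::IndexType::eUint32 );
//
// When the compiler defines VULKAN_HPP_HAS_SPACESHIP_OPERATOR, the defaulted operator<=>
// above replaces the hand-written operator== / operator!= pair.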
); + struct SparseMemoryBind { - VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, - VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : resourceOffset( resourceOffset_ ) - , size( size_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , flags( flags_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseMemoryBind(VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT + : resourceOffset( resourceOffset_ ), size( size_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), flags( flags_ ) {} + VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SparseMemoryBind ) ); return *this; } @@ -22517,6 +20456,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSparseMemoryBind const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22527,6 +20467,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseMemoryBind const& ) const = default; +#else bool operator==( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( resourceOffset == rhs.resourceOffset ) @@ -22540,6 +20484,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {}; @@ -22547,28 +20494,43 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + }; static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SparseBufferMemoryBindInfo { - VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - uint32_t bindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , bindCount( bindCount_ ) - , pBinds( pBinds_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {}) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ), bindCount( bindCount_ ), pBinds( pBinds_ ) {} + VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : buffer( buffer_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SparseBufferMemoryBindInfo ) ); return *this; } @@ -22590,6 +20552,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SparseBufferMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkSparseBufferMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22600,6 +20572,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseBufferMemoryBindInfo const& ) const = default; +#else bool operator==( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( buffer == rhs.buffer ) @@ -22611,33 +20587,51 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::Buffer buffer = {}; uint32_t bindCount = {}; const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {}; + }; static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SparseImageOpaqueMemoryBindInfo { - VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, - uint32_t bindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - , bindCount( bindCount_ ) - , pBinds( pBinds_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {}) VULKAN_HPP_NOEXCEPT + : image( image_ ), bindCount( bindCount_ ), pBinds( pBinds_ ) {} + VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) ); return *this; } @@ -22659,6 +20653,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SparseImageOpaqueMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkSparseImageOpaqueMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22669,6 +20673,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageOpaqueMemoryBindInfo const& ) const = default; +#else bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( image == rhs.image ) @@ -22680,33 +20688,45 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::Image image = {}; uint32_t bindCount = {}; const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {}; + }; static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ImageSubresource { - VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - uint32_t mipLevel_ = {}, - uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , mipLevel( mipLevel_ ) - , arrayLayer( arrayLayer_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresource(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {}) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ), mipLevel( mipLevel_ ), arrayLayer( arrayLayer_ ) {} + VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ImageSubresource& operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageSubresource ) ); return *this; } @@ -22728,6 +20748,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkImageSubresource const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22738,6 +20759,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageSubresource const& ) const = default; +#else bool operator==( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( aspectMask == rhs.aspectMask ) @@ -22749,40 +20774,51 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; uint32_t mipLevel = {}; uint32_t arrayLayer = {}; + }; static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct Offset3D { - VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, - int32_t y_ = {}, - int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , z( z_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Offset3D(int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {}) VULKAN_HPP_NOEXCEPT + : x( x_ ), y( y_ ), z( z_ ) {} - explicit Offset3D( Offset2D const& offset2D, - int32_t z_ = {} ) - : x( offset2D.x ) - , y( offset2D.y ) - , z( z_ ) - {} + VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - Offset3D& operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT + explicit Offset3D( Offset2D const& offset2D, int32_t z_ = {} ) + : x( offset2D.x ) + , y( offset2D.y ) + , z( z_ ) + {} +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( Offset3D ) ); return *this; } @@ -22804,6 +20840,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkOffset3D const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22814,6 +20851,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Offset3D const& ) const = default; +#else bool operator==( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( x == rhs.x ) @@ -22825,40 +20866,51 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: int32_t x = {}; int32_t y = {}; int32_t z = {}; + }; static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct Extent3D { - VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {}, - uint32_t height_ = {}, - uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT - : width( width_ ) - , height( height_ ) - , depth( depth_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Extent3D(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT + : width( width_ ), height( height_ ), depth( depth_ ) {} - explicit Extent3D( Extent2D const& extent2D, - uint32_t depth_ = {} ) - : width( extent2D.width ) - , height( extent2D.height ) - , depth( depth_ ) - {} + VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - Extent3D& operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT + explicit Extent3D( Extent2D const& extent2D, uint32_t depth_ = {} ) + : width( extent2D.width ) + , height( extent2D.height ) + , depth( depth_ ) + {} +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( Extent3D ) ); return *this; } @@ -22880,6 +20932,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkExtent3D const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22890,6 +20943,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Extent3D const& ) const = default; +#else bool operator==( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( width == rhs.width ) @@ -22901,55 +20958,61 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t width = {}; uint32_t height = {}; uint32_t depth = {}; + }; static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SparseImageMemoryBind { - VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, - VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : subresource( subresource_ ) - , offset( offset_ ) - , extent( extent_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , flags( flags_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryBind(VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT + : subresource( subresource_ ), offset( offset_ ), extent( extent_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), flags( flags_ ) {} + VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SparseImageMemoryBind ) ); + return *this; + } + + SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT { subresource = subresource_; return *this; } - SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D offset_ ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } - SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; @@ -22973,6 +21036,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSparseImageMemoryBind const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -22983,6 +21047,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageMemoryBind const& ) const = default; +#else bool operator==( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( subresource == rhs.subresource ) @@ -22997,6 +21065,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {}; @@ -23005,28 +21076,43 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + }; static_assert( 
sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SparseImageMemoryBindInfo { - VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, - uint32_t bindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - , bindCount( bindCount_ ) - , pBinds( pBinds_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ = {}) VULKAN_HPP_NOEXCEPT + : image( image_ ), bindCount( bindCount_ ), pBinds( pBinds_ ) {} + VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SparseImageMemoryBindInfo ) ); return *this; } @@ -23048,6 +21134,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SparseImageMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkSparseImageMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -23058,6 +21154,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageMemoryBindInfo const& ) const = default; +#else bool operator==( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( image == rhs.image ) @@ -23069,53 +21169,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::Image image = {}; uint32_t bindCount = {}; const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds = {}; + }; static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct BindSparseInfo { - VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, - uint32_t bufferBindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ = {}, - uint32_t imageOpaqueBindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = {}, - uint32_t imageBindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ = {}, - uint32_t signalSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT - : waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , bufferBindCount( bufferBindCount_ ) - , pBufferBinds( pBufferBinds_ ) - , imageOpaqueBindCount( imageOpaqueBindCount_ ) - , pImageOpaqueBinds( pImageOpaqueBinds_ ) - , imageBindCount( imageBindCount_ ) - , pImageBinds( pImageBinds_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphores( pSignalSemaphores_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindSparseInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, uint32_t bufferBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ = {}, uint32_t imageOpaqueBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = {}, uint32_t imageBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {}) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), bufferBindCount( bufferBindCount_ ), pBufferBinds( pBufferBinds_ ), imageOpaqueBindCount( imageOpaqueBindCount_ ), pImageOpaqueBinds( pImageOpaqueBinds_ ), imageBindCount( imageBindCount_ ), pImageBinds( pImageBinds_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphores( pSignalSemaphores_ ) {} - VULKAN_HPP_NAMESPACE::BindSparseInfo & operator=( VULKAN_HPP_NAMESPACE::BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindSparseInfo ) - offsetof( BindSparseInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindSparseInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageOpaqueBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), bufferBindCount( static_cast( bufferBinds_.size() ) ), pBufferBinds( bufferBinds_.data() ), imageOpaqueBindCount( static_cast( imageOpaqueBinds_.size() ) ), pImageOpaqueBinds( imageOpaqueBinds_.data() ), imageBindCount( static_cast( 
imageBinds_.size() ) ), pImageBinds( imageBinds_.data() ), signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BindSparseInfo ) ); return *this; } @@ -23137,6 +21236,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindSparseInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT { bufferBindCount = bufferBindCount_; @@ -23149,6 +21257,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindSparseInfo & setBufferBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferBinds_ ) VULKAN_HPP_NOEXCEPT + { + bufferBindCount = static_cast( bufferBinds_.size() ); + pBufferBinds = bufferBinds_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT { imageOpaqueBindCount = imageOpaqueBindCount_; @@ -23161,6 +21278,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindSparseInfo & setImageOpaqueBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + { + imageOpaqueBindCount = static_cast( imageOpaqueBinds_.size() ); + pImageOpaqueBinds = imageOpaqueBinds_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT { imageBindCount = imageBindCount_; @@ -23173,6 +21299,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindSparseInfo & setImageBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageBinds_ ) VULKAN_HPP_NOEXCEPT + { + imageBindCount = static_cast( imageBinds_.size() ); + pImageBinds = imageBinds_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = signalSemaphoreCount_; @@ -23185,6 +21320,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BindSparseInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -23195,6 +21340,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindSparseInfo const& ) const = default; +#else bool operator==( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -23215,6 +21364,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo; @@ -23229,28 +21381,484 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds = {}; uint32_t signalSemaphoreCount = {}; const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {}; + }; static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = BindSparseInfo; + }; + + struct BindVertexBufferIndirectCommandNV + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {}) VULKAN_HPP_NOEXCEPT + : bufferAddress( bufferAddress_ ), size( size_ ), stride( stride_ ) + {} + + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BindVertexBufferIndirectCommandNV ) ); + return *this; + } + + BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + + operator VkBindVertexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindVertexBufferIndirectCommandNV const& ) const = default; +#else + bool operator==( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( bufferAddress == rhs.bufferAddress ) + && ( size == rhs.size ) + && ( stride == rhs.stride ); + } + + bool operator!=( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + uint32_t stride = {}; + + }; + static_assert( sizeof( BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), "struct and wrapper have different size!" 
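// Editor's note (illustrative sketch, not part of the patch): BindSparseInfo above also gained
// ArrayProxyNoTemporaries constructors and setters, so the semaphore and bind arrays can be
// taken straight from named containers; a minimal sketch with assumed variables:
//
//   std::vector<vk::Semaphore>                  waitSemaphores = { waitSemaphore };
//   std::vector<vk::SparseBufferMemoryBindInfo> bufferBinds    = { bufferBind };
//   vk::BindSparseInfo bindSparseInfo( waitSemaphores, bufferBinds );
//
// As with the raw count/pointer form, the struct only stores pointers, so the vectors must
// stay alive until the queue's bindSparse() call that consumes bindSparseInfo has returned.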
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ImageSubresourceLayers + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresourceLayers(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ), mipLevel( mipLevel_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageSubresourceLayers ) ); + return *this; + } + + ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT + { + mipLevel = mipLevel_; + return *this; + } + + ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + + operator VkImageSubresourceLayers const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageSubresourceLayers const& ) const = default; +#else + bool operator==( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspectMask == rhs.aspectMask ) + && ( mipLevel == rhs.mipLevel ) + && ( baseArrayLayer == rhs.baseArrayLayer ) + && ( layerCount == rhs.layerCount ); + } + + bool operator!=( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t mipLevel = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + + }; + static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ImageBlit2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array const& srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array const& dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit2KHR( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageBlit2KHR & operator=( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageBlit2KHR & operator=( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageBlit2KHR ) ); + return *this; + } + + ImageBlit2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageBlit2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageBlit2KHR & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + { + srcOffsets = srcOffsets_; + return *this; + } + + ImageBlit2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageBlit2KHR & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dstOffsets = dstOffsets_; + return *this; + } + + + operator VkImageBlit2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageBlit2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageBlit2KHR const& ) const = default; +#else + bool operator==( ImageBlit2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcSubresource == rhs.srcSubresource ) + && ( srcOffsets == rhs.srcOffsets ) + && ( dstSubresource == rhs.dstSubresource ) + && ( dstOffsets == rhs.dstOffsets ); + } + + bool operator!=( ImageBlit2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + + }; + static_assert( sizeof( ImageBlit2KHR ) == sizeof( VkImageBlit2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageBlit2KHR; + }; + + struct BlitImageInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageBlit2KHR* pRegions_ = {}, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ ), filter( filter_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BlitImageInfo2KHR( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BlitImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest ) + : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ), filter( filter_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + BlitImageInfo2KHR & operator=( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BlitImageInfo2KHR & operator=( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BlitImageInfo2KHR ) ); + return *this; + } + + BlitImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BlitImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + BlitImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + BlitImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + BlitImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + BlitImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + BlitImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BlitImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( 
regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + BlitImageInfo2KHR & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT + { + filter = filter_; + return *this; + } + + + operator VkBlitImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBlitImageInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BlitImageInfo2KHR const& ) const = default; +#else + bool operator==( BlitImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcImage == rhs.srcImage ) + && ( srcImageLayout == rhs.srcImageLayout ) + && ( dstImage == rhs.dstImage ) + && ( dstImageLayout == rhs.dstImageLayout ) + && ( regionCount == rhs.regionCount ) + && ( pRegions == rhs.pRegions ) + && ( filter == rhs.filter ); + } + + bool operator!=( BlitImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageBlit2KHR* pRegions = {}; + VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + + }; + static_assert( sizeof( BlitImageInfo2KHR ) == sizeof( VkBlitImageInfo2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = BlitImageInfo2KHR; + }; + struct BufferCopy { - VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT - : srcOffset( srcOffset_ ) - , dstOffset( dstOffset_ ) - , size( size_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCopy(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT + : srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ ) {} + VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BufferCopy& operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT + BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BufferCopy ) ); return *this; } @@ -23272,6 +21880,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBufferCopy const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -23282,6 +21891,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferCopy const& ) const = default; +#else bool operator==( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( srcOffset == rhs.srcOffset ) @@ -23293,45 +21906,155 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {}; VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct BufferCreateInfo + struct BufferCopy2KHR { - VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , size( size_ ) - , usage( usage_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCopy2KHR(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT + : srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ ) {} - VULKAN_HPP_NAMESPACE::BufferCreateInfo & operator=( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BufferCopy2KHR( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCopy2KHR( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) - offsetof( BufferCreateInfo, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + BufferCopy2KHR & operator=( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + BufferCopy2KHR & operator=( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BufferCopy2KHR ) ); + return *this; + } + + BufferCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + BufferCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + BufferCopy2KHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + + operator VkBufferCopy2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCopy2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferCopy2KHR const& ) const = default; +#else + bool operator==( BufferCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcOffset == rhs.srcOffset ) + && ( dstOffset == rhs.dstOffset ) + && ( size == rhs.size ); + } + + bool operator!=( BufferCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + + }; + static_assert( sizeof( BufferCopy2KHR ) == sizeof( VkBufferCopy2KHR ), "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = BufferCopy2KHR; + }; + + struct BufferCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCreateInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::DeviceSize size_, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) + : flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BufferCreateInfo ) ); return *this; } @@ -23377,6 +22100,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + BufferCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -23387,6 +22120,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferCreateInfo const& ) const = default; +#else bool operator==( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -23403,6 +22140,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo; @@ -23413,30 +22153,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; uint32_t queueFamilyIndexCount = {}; const uint32_t* pQueueFamilyIndices = {}; + }; 
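// Illustrative usage sketch, not part of the generated header or of this patch: the
// ArrayProxyNoTemporaries constructor and the setQueueFamilyIndices setter added above let a
// caller hand BufferCreateInfo a whole container of queue family indices instead of filling
// queueFamilyIndexCount and pQueueFamilyIndices by hand. The names `device`,
// `graphicsQueueFamily` and `transferQueueFamily` below are hypothetical, and the calls assume
// the default, exception-enabled vulkan.hpp configuration.
//
//   std::array<uint32_t, 2> families = { graphicsQueueFamily, transferQueueFamily };
//
//   vk::BufferCreateInfo createInfo( vk::BufferCreateFlags{},
//                                    65536,                                   // size in bytes
//                                    vk::BufferUsageFlagBits::eStorageBuffer,
//                                    vk::SharingMode::eConcurrent,
//                                    families );  // fills queueFamilyIndexCount and pQueueFamilyIndices
//
//   // equivalent, using the new enhanced-mode setter:
//   createInfo.setQueueFamilyIndices( families );
//
//   vk::Buffer buffer = device.createBuffer( createInfo );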
static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = BufferCreateInfo; + }; + struct BufferDeviceAddressCreateInfoEXT { - VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceAddress( deviceAddress_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}) VULKAN_HPP_NOEXCEPT + : deviceAddress( deviceAddress_ ) {} - VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT ) - offsetof( BufferDeviceAddressCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BufferDeviceAddressCreateInfoEXT& operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BufferDeviceAddressCreateInfoEXT ) ); return *this; } @@ -23452,6 +22206,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBufferDeviceAddressCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -23462,6 +22217,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferDeviceAddressCreateInfoEXT const& ) const = default; +#else bool operator==( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -23473,35 +22232,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT; const void* pNext = {}; VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = BufferDeviceAddressCreateInfoEXT; + }; + struct BufferDeviceAddressInfo { - VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) {} - VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & operator=( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo ) - offsetof( BufferDeviceAddressInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BufferDeviceAddressInfo& operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BufferDeviceAddressInfo ) ); return *this; } @@ -23517,6 +22293,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBufferDeviceAddressInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -23527,6 +22304,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferDeviceAddressInfo const& ) const = default; +#else bool operator==( BufferDeviceAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -23538,118 +22319,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo; const void* pNext = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; static_assert( sizeof( BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct ImageSubresourceLayers + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - uint32_t mipLevel_ = {}, - uint32_t baseArrayLayer_ = {}, - uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , mipLevel( mipLevel_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) - {} - - ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT - { - aspectMask = aspectMask_; - return *this; - } - - ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT - { - mipLevel = mipLevel_; - return *this; - } - - ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT - { - baseArrayLayer = baseArrayLayer_; - return *this; - } - - ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT - { - layerCount = layerCount_; - return *this; - } - - operator VkImageSubresourceLayers const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( aspectMask == rhs.aspectMask ) - && ( mipLevel == rhs.mipLevel ) - && ( baseArrayLayer == rhs.baseArrayLayer ) - && ( layerCount == rhs.layerCount ); - } - - bool operator!=( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - uint32_t mipLevel = {}; - uint32_t baseArrayLayer = {}; - uint32_t layerCount = {}; + using Type = BufferDeviceAddressInfo; }; - static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; + using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; struct BufferImageCopy { - VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, - uint32_t bufferRowLength_ = {}, - uint32_t bufferImageHeight_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT - : bufferOffset( bufferOffset_ ) - , bufferRowLength( bufferRowLength_ ) - , bufferImageHeight( bufferImageHeight_ ) - , imageSubresource( imageSubresource_ ) - , imageOffset( imageOffset_ ) - , imageExtent( imageExtent_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferImageCopy(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT + : bufferOffset( bufferOffset_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ ) {} + VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BufferImageCopy& operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BufferImageCopy ) ); return *this; } @@ -23671,24 +22387,25 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ ) VULKAN_HPP_NOEXCEPT + BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; } - BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ ) VULKAN_HPP_NOEXCEPT + BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return *this; } - BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ ) VULKAN_HPP_NOEXCEPT + BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } + operator VkBufferImageCopy const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -23699,6 +22416,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferImageCopy const& ) const = default; +#else bool operator==( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( bufferOffset == rhs.bufferOffset ) @@ -23713,6 +22434,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; @@ -23721,42 +22445,165 @@ namespace 
VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct BufferMemoryBarrier + struct BufferImageCopy2KHR { - VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - uint32_t srcQueueFamilyIndex_ = {}, - uint32_t dstQueueFamilyIndex_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT - : srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , buffer( buffer_ ) - , offset( offset_ ) - , size( size_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT + : bufferOffset( bufferOffset_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ ) {} - VULKAN_HPP_NAMESPACE::BufferMemoryBarrier & operator=( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy2KHR( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier ) - offsetof( BufferMemoryBarrier, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + BufferImageCopy2KHR & operator=( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + BufferImageCopy2KHR & operator=( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BufferImageCopy2KHR ) ); + return *this; + } + + BufferImageCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferImageCopy2KHR & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + bufferOffset = bufferOffset_; + return *this; + } + + BufferImageCopy2KHR & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + BufferImageCopy2KHR & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + BufferImageCopy2KHR & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + BufferImageCopy2KHR & 
setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + BufferImageCopy2KHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + + operator VkBufferImageCopy2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferImageCopy2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferImageCopy2KHR const& ) const = default; +#else + bool operator==( BufferImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( bufferOffset == rhs.bufferOffset ) + && ( bufferRowLength == rhs.bufferRowLength ) + && ( bufferImageHeight == rhs.bufferImageHeight ) + && ( imageSubresource == rhs.imageSubresource ) + && ( imageOffset == rhs.imageOffset ) + && ( imageExtent == rhs.imageExtent ); + } + + bool operator!=( BufferImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + + }; + static_assert( sizeof( BufferImageCopy2KHR ) == sizeof( VkBufferImageCopy2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = BufferImageCopy2KHR; + }; + + struct BufferMemoryBarrier + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT + : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), buffer( buffer_ ), offset( offset_ ), size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BufferMemoryBarrier ) ); return *this; } @@ -23808,6 +22655,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBufferMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -23818,6 +22666,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferMemoryBarrier const& ) const = default; +#else bool operator==( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -23835,6 +22687,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier; @@ -23846,30 +22701,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = BufferMemoryBarrier; + }; + struct BufferMemoryRequirementsInfo2 { - VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) {} - VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & operator=( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 ) - offsetof( BufferMemoryRequirementsInfo2, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BufferMemoryRequirementsInfo2 ) ); return *this; } @@ -23885,6 +22754,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBufferMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -23895,6 +22765,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferMemoryRequirementsInfo2 const& ) const = default; +#else bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -23906,35 +22780,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2; const void* pNext = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = BufferMemoryRequirementsInfo2; + }; + using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; + struct BufferOpaqueCaptureAddressCreateInfo { - VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT - : opaqueCaptureAddress( opaqueCaptureAddress_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo(uint64_t opaqueCaptureAddress_ = {}) VULKAN_HPP_NOEXCEPT + : opaqueCaptureAddress( opaqueCaptureAddress_ ) {} - VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo & operator=( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo ) - offsetof( BufferOpaqueCaptureAddressCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BufferOpaqueCaptureAddressCreateInfo& operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BufferOpaqueCaptureAddressCreateInfo ) ); return *this; } @@ -23950,6 +22842,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBufferOpaqueCaptureAddressCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -23960,6 +22853,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const& ) const = default; +#else bool operator==( BufferOpaqueCaptureAddressCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -23971,43 +22868,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; const void* pNext = {}; uint64_t opaqueCaptureAddress = {}; + }; static_assert( sizeof( BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = BufferOpaqueCaptureAddressCreateInfo; + }; + using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; + struct BufferViewCreateInfo { - VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , buffer( buffer_ ) - , format( format_ ) - , offset( offset_ ) - , range( range_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferViewCreateInfo(VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), buffer( buffer_ ), format( format_ ), offset( offset_ ), range( range_ ) {} - VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & operator=( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) - offsetof( BufferViewCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( BufferViewCreateInfo ) ); return *this; } @@ -24047,6 +22954,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkBufferViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -24057,6 +22965,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferViewCreateInfo const& ) const = default; +#else bool operator==( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -24072,6 +22984,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo; @@ -24081,30 +22996,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + }; static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = BufferViewCreateInfo; + }; + struct CalibratedTimestampInfoEXT { - VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT - : timeDomain( timeDomain_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT(VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice) VULKAN_HPP_NOEXCEPT + : timeDomain( timeDomain_ ) {} - VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & operator=( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT ) - offsetof( CalibratedTimestampInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - CalibratedTimestampInfoEXT& operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + CalibratedTimestampInfoEXT & operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CalibratedTimestampInfoEXT & operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CalibratedTimestampInfoEXT ) ); return *this; } @@ -24120,6 +23049,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkCalibratedTimestampInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -24130,6 +23060,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CalibratedTimestampInfoEXT const& ) const = default; +#else bool operator==( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -24141,40 +23075,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT; const void* pNext = {}; VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice; + }; static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = CalibratedTimestampInfoEXT; + }; + struct CheckpointDataNV { - CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, - void* pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT - : stage( stage_ ) - , pCheckpointMarker( pCheckpointMarker_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CheckpointDataNV(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, void* pCheckpointMarker_ = {}) VULKAN_HPP_NOEXCEPT + : stage( stage_ ), pCheckpointMarker( pCheckpointMarker_ ) {} - VULKAN_HPP_NAMESPACE::CheckpointDataNV & operator=( VULKAN_HPP_NAMESPACE::CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CheckpointDataNV ) - offsetof( CheckpointDataNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - CheckpointDataNV& operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CheckpointDataNV ) ); + return *this; + } + + operator VkCheckpointDataNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -24185,6 +23135,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CheckpointDataNV const& ) const = default; +#else bool operator==( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -24197,54 +23151,66 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe; void* pCheckpointMarker = {}; + }; static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = CheckpointDataNV; + }; + union ClearColorValue { - ClearColorValue( const std::array& float32_ = {} ) + ClearColorValue( VULKAN_HPP_NAMESPACE::ClearColorValue const& rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( float32, float32_.data(), 4 * sizeof( float ) ); + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) ); } + ClearColorValue( const std::array& float32_ = {} ) + : float32( float32_ ) + {} + ClearColorValue( const std::array& int32_ ) - { - memcpy( int32, int32_.data(), 4 * sizeof( int32_t ) ); - } + : int32( int32_ ) + {} ClearColorValue( const std::array& uint32_ ) - { - memcpy( uint32, uint32_.data(), 4 * sizeof( uint32_t ) ); - } + : uint32( uint32_ ) + {} ClearColorValue & setFloat32( std::array float32_ ) VULKAN_HPP_NOEXCEPT { - memcpy( float32, float32_.data(), 4 * sizeof( float ) ); + float32 = float32_; return *this; } ClearColorValue & setInt32( std::array int32_ ) VULKAN_HPP_NOEXCEPT { - memcpy( int32, int32_.data(), 4 * sizeof( int32_t ) ); + int32 = int32_; return *this; } ClearColorValue & setUint32( std::array uint32_ ) VULKAN_HPP_NOEXCEPT { - memcpy( uint32, uint32_.data(), 4 * sizeof( uint32_t ) ); + uint32 = uint32_; return *this; } VULKAN_HPP_NAMESPACE::ClearColorValue & operator=( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) ); + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) ); return *this; } @@ -24258,27 +23224,37 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast(this); } - float float32[4]; - int32_t int32[4]; - uint32_t uint32[4]; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D float32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D int32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D uint32; }; struct ClearDepthStencilValue { - VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, - uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT - : depth( depth_ ) - , stencil( stencil_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue(float depth_ = {}, uint32_t stencil_ = {}) VULKAN_HPP_NOEXCEPT + : depth( depth_ ), stencil( stencil_ ) {} + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ClearDepthStencilValue ) ); return *this; } @@ -24294,6 +23270,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkClearDepthStencilValue const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -24304,6 +23281,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ClearDepthStencilValue const& ) const = default; +#else bool operator==( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( depth == rhs.depth ) @@ -24314,33 +23295,40 @@ namespace 
VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: float depth = {}; uint32_t stencil = {}; + }; static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); union ClearValue { - ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) + ClearValue( VULKAN_HPP_NAMESPACE::ClearValue const& rhs ) VULKAN_HPP_NOEXCEPT { - color = color_; + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) ); } + ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) + : color( color_ ) + {} + ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) - { - depthStencil = depthStencil_; - } + : depthStencil( depthStencil_ ) + {} - ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue color_ ) VULKAN_HPP_NOEXCEPT + ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT { color = color_; return *this; } - ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) VULKAN_HPP_NOEXCEPT + ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT { depthStencil = depthStencil_; return *this; @@ -24348,7 +23336,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ClearValue & operator=( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) ); + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) ); return *this; } @@ -24373,22 +23361,30 @@ namespace VULKAN_HPP_NAMESPACE struct ClearAttachment { - ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - uint32_t colorAttachment_ = {}, - VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , colorAttachment( colorAttachment_ ) - , clearValue( clearValue_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + ClearAttachment(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t colorAttachment_ = {}, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ), colorAttachment( colorAttachment_ ), clearValue( clearValue_ ) {} + ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ClearAttachment& operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT + ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ClearAttachment ) ); return *this; } @@ -24404,12 +23400,13 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue clearValue_ ) VULKAN_HPP_NOEXCEPT + ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT { clearValue = clearValue_; return *this; } + operator VkClearAttachment const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -24420,36 +23417,48 @@ namespace VULKAN_HPP_NAMESPACE return 
*reinterpret_cast( this ); } + + + public: VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; uint32_t colorAttachment = {}; VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; + }; static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct ClearRect { - VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, - uint32_t baseArrayLayer_ = {}, - uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT - : rect( rect_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearRect(VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT + : rect( rect_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ ) {} + VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ClearRect& operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT + ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ ) VULKAN_HPP_NOEXCEPT + ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ClearRect ) ); + return *this; + } + + ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT { rect = rect_; return *this; @@ -24467,6 +23476,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkClearRect const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -24477,6 +23487,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ClearRect const& ) const = default; +#else bool operator==( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( rect == rhs.rect ) @@ -24488,342 +23502,45 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::Rect2D rect = {}; uint32_t baseArrayLayer = {}; uint32_t layerCount = {}; + }; static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct IndirectCommandsTokenNVX - { - VULKAN_HPP_CONSTEXPR IndirectCommandsTokenNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT - : tokenType( tokenType_ ) - , buffer( buffer_ ) - , offset( offset_ ) - {} - - IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - IndirectCommandsTokenNVX & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT - { - tokenType = tokenType_; - return *this; - } - - IndirectCommandsTokenNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT - { - buffer = buffer_; - return *this; - } - - IndirectCommandsTokenNVX & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT - { - offset = offset_; - return *this; - } - - operator VkIndirectCommandsTokenNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectCommandsTokenNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( IndirectCommandsTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( tokenType == rhs.tokenType ) - && ( buffer == rhs.buffer ) - && ( offset == rhs.offset ); - } - - bool operator!=( IndirectCommandsTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - }; - static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct CmdProcessCommandsInfoNVX - { - VULKAN_HPP_CONSTEXPR CmdProcessCommandsInfoNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ = {}, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ = {}, - uint32_t indirectCommandsTokenCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = {}, - uint32_t maxSequencesCount_ = {}, - VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, - VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT - : objectTable( objectTable_ ) - , indirectCommandsLayout( indirectCommandsLayout_ ) - , indirectCommandsTokenCount( indirectCommandsTokenCount_ ) - , pIndirectCommandsTokens( pIndirectCommandsTokens_ ) - , maxSequencesCount( maxSequencesCount_ ) - , targetCommandBuffer( targetCommandBuffer_ ) - , sequencesCountBuffer( sequencesCountBuffer_ ) - , sequencesCountOffset( sequencesCountOffset_ ) - , sequencesIndexBuffer( sequencesIndexBuffer_ ) - , sequencesIndexOffset( sequencesIndexOffset_ ) - {} - - VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX & operator=( VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX ) - offsetof( CmdProcessCommandsInfoNVX, pNext ) ); - return *this; - } - - CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - CmdProcessCommandsInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - CmdProcessCommandsInfoNVX & setObjectTable( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT - { - objectTable = objectTable_; - return *this; - } - - CmdProcessCommandsInfoNVX & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT - { - indirectCommandsLayout = indirectCommandsLayout_; - return *this; - } - - CmdProcessCommandsInfoNVX & setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ ) VULKAN_HPP_NOEXCEPT - { - indirectCommandsTokenCount = indirectCommandsTokenCount_; - return *this; - } - - CmdProcessCommandsInfoNVX & setPIndirectCommandsTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ ) VULKAN_HPP_NOEXCEPT - { - pIndirectCommandsTokens = pIndirectCommandsTokens_; - return *this; - } - - CmdProcessCommandsInfoNVX & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT - { - maxSequencesCount = maxSequencesCount_; - return *this; - } - - CmdProcessCommandsInfoNVX & setTargetCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer_ ) VULKAN_HPP_NOEXCEPT - { - targetCommandBuffer = targetCommandBuffer_; - return *this; - } - - CmdProcessCommandsInfoNVX & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT - { - sequencesCountBuffer = sequencesCountBuffer_; - return *this; - } - - CmdProcessCommandsInfoNVX & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT - { - sequencesCountOffset = sequencesCountOffset_; - 
return *this; - } - - CmdProcessCommandsInfoNVX & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT - { - sequencesIndexBuffer = sequencesIndexBuffer_; - return *this; - } - - CmdProcessCommandsInfoNVX & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT - { - sequencesIndexOffset = sequencesIndexOffset_; - return *this; - } - - operator VkCmdProcessCommandsInfoNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkCmdProcessCommandsInfoNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectTable == rhs.objectTable ) - && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) - && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount ) - && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens ) - && ( maxSequencesCount == rhs.maxSequencesCount ) - && ( targetCommandBuffer == rhs.targetCommandBuffer ) - && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) - && ( sequencesCountOffset == rhs.sequencesCountOffset ) - && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) - && ( sequencesIndexOffset == rhs.sequencesIndexOffset ); - } - - bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCmdProcessCommandsInfoNVX; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout = {}; - uint32_t indirectCommandsTokenCount = {}; - const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens = {}; - uint32_t maxSequencesCount = {}; - VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer = {}; - VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; - VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; - }; - static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct CmdReserveSpaceForCommandsInfoNVX - { - VULKAN_HPP_CONSTEXPR CmdReserveSpaceForCommandsInfoNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ = {}, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ = {}, - uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT - : objectTable( objectTable_ ) - , indirectCommandsLayout( indirectCommandsLayout_ ) - , maxSequencesCount( maxSequencesCount_ ) - {} - - VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX & operator=( VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX ) - offsetof( CmdReserveSpaceForCommandsInfoNVX, pNext ) ); - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX & setObjectTable( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT - { - objectTable = objectTable_; - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT - { - indirectCommandsLayout = indirectCommandsLayout_; - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT - { - maxSequencesCount = maxSequencesCount_; - return *this; - } - - operator VkCmdReserveSpaceForCommandsInfoNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkCmdReserveSpaceForCommandsInfoNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectTable == rhs.objectTable ) - && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) - && ( maxSequencesCount == rhs.maxSequencesCount ); - } - - bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCmdReserveSpaceForCommandsInfoNVX; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout = {}; - uint32_t maxSequencesCount = {}; - }; - static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct CoarseSampleLocationNV { - VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, - uint32_t pixelY_ = {}, - uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT - : pixelX( pixelX_ ) - , pixelY( pixelY_ ) - , sample( sample_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV(uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {}) VULKAN_HPP_NOEXCEPT + : pixelX( pixelX_ ), pixelY( pixelY_ ), sample( sample_ ) {} + VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - CoarseSampleLocationNV& operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT + CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CoarseSampleLocationNV ) ); return *this; } @@ -24845,6 +23562,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkCoarseSampleLocationNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -24855,6 +23573,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CoarseSampleLocationNV const& ) const = default; +#else bool operator==( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( pixelX == rhs.pixelX ) @@ -24866,35 +23588,51 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t pixelX = {}; uint32_t pixelY = {}; uint32_t sample = {}; + }; static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct CoarseSampleOrderCustomNV { - VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, - uint32_t sampleCount_ = {}, - uint32_t sampleLocationCount_ = {}, - const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT - : shadingRate( shadingRate_ ) - , sampleCount( sampleCount_ ) - , sampleLocationCount( sampleLocationCount_ ) - , pSampleLocations( pSampleLocations_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV(VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, uint32_t sampleCount_ = {}, uint32_t sampleLocationCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT + : shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( sampleLocationCount_ ), pSampleLocations( pSampleLocations_ ) {} + VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - CoarseSampleOrderCustomNV& operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, uint32_t sampleCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) + : shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( static_cast( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CoarseSampleOrderCustomNV ) ); return *this; } @@ -24922,6 +23660,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + CoarseSampleOrderCustomNV & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationCount = static_cast( sampleLocations_.size() ); + pSampleLocations = sampleLocations_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkCoarseSampleOrderCustomNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -24932,6 +23680,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CoarseSampleOrderCustomNV const& ) const = default; +#else bool operator==( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( shadingRate == rhs.shadingRate ) @@ -24944,40 +23696,147 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations; uint32_t sampleCount = {}; uint32_t sampleLocationCount = {}; const 
VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations = {}; + }; static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct CommandBufferAllocateInfo + class CommandPool { - VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, - VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, - uint32_t commandBufferCount_ = {} ) VULKAN_HPP_NOEXCEPT - : commandPool( commandPool_ ) - , level( level_ ) - , commandBufferCount( commandBufferCount_ ) + public: + using CType = VkCommandPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; + + public: + VULKAN_HPP_CONSTEXPR CommandPool() VULKAN_HPP_NOEXCEPT + : m_commandPool(VK_NULL_HANDLE) {} - VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_commandPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT + : m_commandPool( commandPool ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + CommandPool & operator=(VkCommandPool commandPool) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) - offsetof( CommandBufferAllocateInfo, pNext ) ); + m_commandPool = commandPool; return *this; } +#endif + + CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CommandPool const& ) const = default; +#else + bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool == rhs.m_commandPool; + } + + bool operator!=(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool != rhs.m_commandPool; + } + + bool operator<(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool < rhs.m_commandPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool == VK_NULL_HANDLE; + } + + private: + VkCommandPool m_commandPool; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct CommandBufferAllocateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo(VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = {}) VULKAN_HPP_NOEXCEPT + : commandPool( commandPool_ ), level( level_ ), commandBufferCount( commandBufferCount_ ) + {} + + VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CommandBufferAllocateInfo ) ); return *this; } @@ -25005,6 +23864,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkCommandBufferAllocateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -25015,6 +23875,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CommandBufferAllocateInfo const& ) const = default; +#else bool operator==( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -25028,6 +23892,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo; @@ -25035,40 +23902,244 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::CommandPool commandPool = {}; VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary; uint32_t commandBufferCount = {}; + }; static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct CommandBufferInheritanceInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t subpass_ = {}, - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, - VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT - : renderPass( renderPass_ ) - , subpass( subpass_ ) - , framebuffer( framebuffer_ ) - , occlusionQueryEnable( occlusionQueryEnable_ ) - , queryFlags( queryFlags_ ) - , pipelineStatistics( pipelineStatistics_ ) + using Type = CommandBufferAllocateInfo; + }; + + class RenderPass + { + public: + using CType = VkRenderPass; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; + + public: + VULKAN_HPP_CONSTEXPR RenderPass() VULKAN_HPP_NOEXCEPT + : m_renderPass(VK_NULL_HANDLE) {} - VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo & operator=( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_renderPass(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT + : m_renderPass( renderPass ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + RenderPass & operator=(VkRenderPass renderPass) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo ) - offsetof( CommandBufferInheritanceInfo, pNext ) ); + m_renderPass = renderPass; return *this; } +#endif + + RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RenderPass const& ) const = default; +#else + bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass == rhs.m_renderPass; + } + + bool operator!=(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass != rhs.m_renderPass; + } + + bool operator<(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass < rhs.m_renderPass; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass == VK_NULL_HANDLE; + } + + private: + VkRenderPass m_renderPass; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Framebuffer + { + public: + using CType = VkFramebuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + + public: + VULKAN_HPP_CONSTEXPR Framebuffer() VULKAN_HPP_NOEXCEPT + : m_framebuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_framebuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT + : m_framebuffer( framebuffer ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Framebuffer & operator=(VkFramebuffer framebuffer) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = framebuffer; + return *this; + } +#endif + + Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Framebuffer const& ) const = default; +#else + bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer == rhs.m_framebuffer; + } + + bool operator!=(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer != rhs.m_framebuffer; + } + + bool operator<(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer < rhs.m_framebuffer; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer == VK_NULL_HANDLE; + } + + private: + VkFramebuffer m_framebuffer; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct CommandBufferInheritanceInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}) VULKAN_HPP_NOEXCEPT + : renderPass( renderPass_ ), subpass( subpass_ ), framebuffer( framebuffer_ ), occlusionQueryEnable( occlusionQueryEnable_ ), queryFlags( queryFlags_ ), pipelineStatistics( pipelineStatistics_ ) + {} + + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CommandBufferInheritanceInfo ) ); return *this; } @@ -25114,6 +24185,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkCommandBufferInheritanceInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -25124,6 +24196,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CommandBufferInheritanceInfo const& ) const = default; +#else bool operator==( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -25140,6 +24216,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo; @@ -25150,32 +24229,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {}; VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {}; VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; + }; static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = CommandBufferInheritanceInfo; + }; + struct CommandBufferBeginInfo { - VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, - const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pInheritanceInfo( pInheritanceInfo_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo(VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), pInheritanceInfo( pInheritanceInfo_ ) {} - VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & operator=( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo ) - offsetof( CommandBufferBeginInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CommandBufferBeginInfo ) ); return *this; } @@ -25197,6 +24288,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -25207,6 +24299,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CommandBufferBeginInfo const& ) const = default; +#else bool operator==( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -25219,36 +24315,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo; const void* pNext = {}; VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {}; const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo = {}; + }; static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = CommandBufferBeginInfo; + }; + struct CommandBufferInheritanceConditionalRenderingInfoEXT { - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {} ) VULKAN_HPP_NOEXCEPT - : conditionalRenderingEnable( conditionalRenderingEnable_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}) VULKAN_HPP_NOEXCEPT + : conditionalRenderingEnable( conditionalRenderingEnable_ ) {} - VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT ) - offsetof( CommandBufferInheritanceConditionalRenderingInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - CommandBufferInheritanceConditionalRenderingInfoEXT& operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) ); return *this; } @@ -25264,6 +24377,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -25274,6 +24388,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const& ) const = default; +#else bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -25285,37 +24403,147 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; const void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {}; + }; static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct CommandPoolCreateInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, - uint32_t queueFamilyIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) + using Type = CommandBufferInheritanceConditionalRenderingInfoEXT; + }; + + struct CommandBufferInheritanceRenderPassTransformInfoQCOM + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}) VULKAN_HPP_NOEXCEPT + : transform( transform_ ), renderArea( renderArea_ ) {} - VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & operator=( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) - offsetof( CommandPoolCreateInfo, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) ); + return *this; + } + + CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CommandBufferInheritanceRenderPassTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + CommandBufferInheritanceRenderPassTransformInfoQCOM & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + { + renderArea = renderArea_; + return *this; + } + + + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const& ) const = default; +#else + bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( transform == rhs.transform ) + && ( renderArea == rhs.renderArea ); + } + + bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + 
public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + + }; + static_assert( sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) == sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM; + }; + + struct CommandPoolCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CommandPoolCreateInfo ) ); return *this; } @@ -25337,6 +24565,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkCommandPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -25347,6 +24576,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CommandPoolCreateInfo const& ) const = default; +#else bool operator==( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -25359,34 +24592,152 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo; const void* pNext = {}; VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {}; uint32_t queueFamilyIndex = {}; + }; static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = CommandPoolCreateInfo; + }; + + class ShaderModule + { + public: + using CType = VkShaderModule; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + + public: + VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT + : m_shaderModule(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_shaderModule(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT + : m_shaderModule( shaderModule ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = shaderModule; + return *this; + } +#endif + + ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ShaderModule const& ) const = default; +#else + bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule == rhs.m_shaderModule; + } + + bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule != rhs.m_shaderModule; + } + + bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule < rhs.m_shaderModule; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule == VK_NULL_HANDLE; + } + + private: + VkShaderModule m_shaderModule; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + struct SpecializationMapEntry { - VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {}, - uint32_t offset_ = {}, - size_t size_ = {} ) VULKAN_HPP_NOEXCEPT - : constantID( constantID_ ) - , offset( offset_ ) - , size( size_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SpecializationMapEntry(uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {}) VULKAN_HPP_NOEXCEPT + : constantID( constantID_ ), offset( offset_ ), size( size_ ) {} + VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT + SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SpecializationMapEntry ) ); return *this; } @@ -25408,6 +24759,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSpecializationMapEntry const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -25418,6 +24770,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SpecializationMapEntry const& ) const = default; +#else bool operator==( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( constantID == rhs.constantID ) @@ -25429,35 +24785,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t constantID = {}; uint32_t offset = {}; size_t size = {}; + }; static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SpecializationInfo { - VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {}, - size_t dataSize_ = {}, - const void* pData_ = {} ) VULKAN_HPP_NOEXCEPT - : mapEntryCount( mapEntryCount_ ) - , pMapEntries( pMapEntries_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SpecializationInfo(uint32_t mapEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {}, size_t dataSize_ = {}, const void* pData_ = {}) VULKAN_HPP_NOEXCEPT + : mapEntryCount( mapEntryCount_ ), pMapEntries( pMapEntries_ ), dataSize( dataSize_ ), pData( pData_ ) {} + VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - SpecializationInfo& operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mapEntries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ = {} ) + : mapEntryCount( static_cast( mapEntries_.size() ) ), pMapEntries( mapEntries_.data() ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SpecializationInfo ) ); return *this; } @@ -25473,6 +24846,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SpecializationInfo & setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mapEntries_ ) VULKAN_HPP_NOEXCEPT + { + mapEntryCount = static_cast( mapEntries_.size() ); + pMapEntries = mapEntries_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; @@ -25485,6 +24867,17 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = data_.size() * sizeof(T); + pData = data_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkSpecializationInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -25495,6 +24888,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SpecializationInfo const& ) const = default; +#else bool operator==( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( mapEntryCount == rhs.mapEntryCount ) @@ -25507,44 +24904,47 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t mapEntryCount = {}; const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries = {}; size_t dataSize = {}; const void* pData = {}; + }; static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), 
"struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PipelineShaderStageCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, - VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, - const char* pName_ = {}, - const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , stage( stage_ ) - , module( module_ ) - , pName( pName_ ) - , pSpecializationInfo( pSpecializationInfo_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, const char* pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), stage( stage_ ), module( module_ ), pName( pName_ ), pSpecializationInfo( pSpecializationInfo_ ) {} - VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo ) - offsetof( PipelineShaderStageCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineShaderStageCreateInfo ) ); return *this; } @@ -25584,6 +24984,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineShaderStageCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -25594,6 +24995,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineShaderStageCreateInfo const& ) const = default; +#else bool operator==( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -25609,6 +25014,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; @@ -25618,38 +25026,244 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ShaderModule module = {}; const char* pName = {}; const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo = {}; + }; static_assert( sizeof( 
PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct ComputePipelineCreateInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , stage( stage_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) + using Type = PipelineShaderStageCreateInfo; + }; + + class PipelineLayout + { + public: + using CType = VkPipelineLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + + public: + VULKAN_HPP_CONSTEXPR PipelineLayout() VULKAN_HPP_NOEXCEPT + : m_pipelineLayout(VK_NULL_HANDLE) {} - VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_pipelineLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT + : m_pipelineLayout( pipelineLayout ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + PipelineLayout & operator=(VkPipelineLayout pipelineLayout) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo ) - offsetof( ComputePipelineCreateInfo, pNext ) ); + m_pipelineLayout = pipelineLayout; return *this; } +#endif + + PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineLayout const& ) const = default; +#else + bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == rhs.m_pipelineLayout; + } + + bool operator!=(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != rhs.m_pipelineLayout; + } + + bool operator<(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout < rhs.m_pipelineLayout; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == VK_NULL_HANDLE; + } + + private: + VkPipelineLayout m_pipelineLayout; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Pipeline + { + public: + using CType = VkPipeline; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + + public: + VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT + : m_pipeline(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_pipeline(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT + : m_pipeline( pipeline ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT + { + m_pipeline = pipeline; + return *this; + } +#endif + + Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Pipeline const& ) const = default; +#else + bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == rhs.m_pipeline; + } + + bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != rhs.m_pipeline; + } + + bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline < rhs.m_pipeline; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == VK_NULL_HANDLE; + } + + private: + VkPipeline m_pipeline; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct ComputePipelineCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), stage( stage_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ComputePipelineCreateInfo ) ); return *this; } @@ -25665,7 +25279,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ ) VULKAN_HPP_NOEXCEPT + ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT { stage = stage_; return *this; @@ -25689,6 +25303,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkComputePipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -25699,6 +25314,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ComputePipelineCreateInfo const& ) const = default; +#else bool operator==( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -25714,6 +25333,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo; @@ -25723,34 +25345,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; int32_t basePipelineIndex = {}; + }; static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = ComputePipelineCreateInfo; + }; + struct ConditionalRenderingBeginInfoEXT { - VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , offset( offset_ ) - , flags( flags_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ), offset( offset_ ), flags( flags_ ) {} - VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) - offsetof( ConditionalRenderingBeginInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ConditionalRenderingBeginInfoEXT& operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ConditionalRenderingBeginInfoEXT ) ); return *this; } @@ -25778,6 +25410,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkConditionalRenderingBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -25788,6 +25421,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ConditionalRenderingBeginInfoEXT const& ) const = default; +#else bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -25801,6 +25438,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; @@ -25808,30 +25448,43 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {}; + }; static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = ConditionalRenderingBeginInfoEXT; + }; + struct ConformanceVersion { - VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, - uint8_t minor_ = {}, - uint8_t subminor_ = {}, - uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT - : major( major_ ) - , minor( minor_ ) - , subminor( subminor_ ) - , patch( patch_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConformanceVersion(uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {}) VULKAN_HPP_NOEXCEPT + : major( major_ ), minor( minor_ ), subminor( subminor_ ), patch( patch_ ) {} + VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ConformanceVersion& operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT + ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ConformanceVersion ) ); return *this; } @@ -25859,6 +25512,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkConformanceVersion const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -25869,6 +25523,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ConformanceVersion const& ) const = default; +#else bool operator==( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( major == rhs.major ) @@ -25881,50 +25539,48 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint8_t major = {}; uint8_t minor = {}; uint8_t subminor = {}; uint8_t patch = {}; + }; static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using ConformanceVersionKHR = ConformanceVersion; struct CooperativeMatrixPropertiesNV { - VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {}, - uint32_t NSize_ = {}, - uint32_t KSize_ = {}, - VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT - : MSize( MSize_ ) - , NSize( NSize_ ) - , KSize( KSize_ ) - , AType( AType_ ) - , BType( BType_ ) - , CType( CType_ ) - , DType( DType_ ) - , scope( scope_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice) VULKAN_HPP_NOEXCEPT + : MSize( MSize_ ), NSize( NSize_ ), KSize( KSize_ ), AType( AType_ ), BType( BType_ ), CType( CType_ ), DType( DType_ ), scope( scope_ ) {} - VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) - offsetof( CooperativeMatrixPropertiesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - CooperativeMatrixPropertiesNV& operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CooperativeMatrixPropertiesNV ) ); return *this; } @@ -25982,6 +25638,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -25992,6 +25649,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CooperativeMatrixPropertiesNV const& ) const = default; +#else bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -26010,6 +25671,9 @@ namespace 
VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV; @@ -26022,42 +25686,596 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice; + }; static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct CopyDescriptorSet + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, - uint32_t srcBinding_ = {}, - uint32_t srcArrayElement_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, - uint32_t dstBinding_ = {}, - uint32_t dstArrayElement_ = {}, - uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSet( srcSet_ ) - , srcBinding( srcBinding_ ) - , srcArrayElement( srcArrayElement_ ) - , dstSet( dstSet_ ) - , dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - , descriptorCount( descriptorCount_ ) + using Type = CooperativeMatrixPropertiesNV; + }; + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct CopyAccelerationStructureInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT + : src( src_ ), dst( dst_ ), mode( mode_ ) {} - VULKAN_HPP_NAMESPACE::CopyDescriptorSet & operator=( VULKAN_HPP_NAMESPACE::CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) - offsetof( CopyDescriptorSet, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CopyAccelerationStructureInfoKHR ) ); + return *this; + } + + CopyAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + CopyAccelerationStructureInfoKHR & setMode( 
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+
+    operator VkCopyAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyAccelerationStructureInfoKHR*>( this );
+    }
+
+    operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyAccelerationStructureInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CopyAccelerationStructureInfoKHR const& ) const = default;
+#else
+    bool operator==( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( src == rhs.src )
+          && ( dst == rhs.dst )
+          && ( mode == rhs.mode );
+    }
+
+    bool operator!=( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
+    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+
+  };
+  static_assert( sizeof( CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyAccelerationStructureInfoKHR>
+  {
+    using Type = CopyAccelerationStructureInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct CopyAccelerationStructureToMemoryInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    CopyAccelerationStructureToMemoryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
+    : src( src_ ), dst( dst_ ), mode( mode_ )
+    {}
+
+    CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyAccelerationStructureToMemoryInfoKHR ) );
+      return *this;
+    }
+
+    CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
+    {
+      src = src_;
+      return *this;
+    }
+
+    CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dst =
dst_; + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + + operator VkCopyAccelerationStructureToMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + + }; + static_assert( sizeof( CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyAccelerationStructureToMemoryInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct CopyBufferInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyBufferInfo2KHR(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT + : srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyBufferInfo2KHR( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferInfo2KHR( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + CopyBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + CopyBufferInfo2KHR & operator=( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyBufferInfo2KHR & operator=( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CopyBufferInfo2KHR ) ); + return *this; + } + + CopyBufferInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyBufferInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + CopyBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + CopyBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + CopyBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT + { + 
pRegions = pRegions_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + CopyBufferInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkCopyBufferInfo2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CopyBufferInfo2KHR const& ) const = default; +#else + bool operator==( CopyBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcBuffer == rhs.srcBuffer ) + && ( dstBuffer == rhs.dstBuffer ) + && ( regionCount == rhs.regionCount ) + && ( pRegions == rhs.pRegions ); + } + + bool operator!=( CopyBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferCopy2KHR* pRegions = {}; + + }; + static_assert( sizeof( CopyBufferInfo2KHR ) == sizeof( VkCopyBufferInfo2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyBufferInfo2KHR; + }; + + struct CopyBufferToImageInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2KHR(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT + : srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2KHR( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferToImageInfo2KHR( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + CopyBufferToImageInfo2KHR( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + CopyBufferToImageInfo2KHR & operator=( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyBufferToImageInfo2KHR & operator=( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + 
memcpy( static_cast( this ), &rhs, sizeof( CopyBufferToImageInfo2KHR ) ); + return *this; + } + + CopyBufferToImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyBufferToImageInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + CopyBufferToImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + CopyBufferToImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + CopyBufferToImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + CopyBufferToImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + CopyBufferToImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkCopyBufferToImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferToImageInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CopyBufferToImageInfo2KHR const& ) const = default; +#else + bool operator==( CopyBufferToImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcBuffer == rhs.srcBuffer ) + && ( dstImage == rhs.dstImage ) + && ( dstImageLayout == rhs.dstImageLayout ) + && ( regionCount == rhs.regionCount ) + && ( pRegions == rhs.pRegions ); + } + + bool operator!=( CopyBufferToImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions = {}; + + }; + static_assert( sizeof( CopyBufferToImageInfo2KHR ) == sizeof( VkCopyBufferToImageInfo2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyBufferToImageInfo2KHR>
+  {
+    using Type = CopyBufferToImageInfo2KHR;
+  };
+
+  class DescriptorSet
+  {
+  public:
+    using CType = VkDescriptorSet;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet( descriptorSet )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSet = descriptorSet;
+      return *this;
+    }
+#endif
+
+    DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSet = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSet const& ) const = default;
+#else
+    bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet == rhs.m_descriptorSet;
+    }
+
+    bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet != rhs.m_descriptorSet;
+    }
+
+    bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet < rhs.m_descriptorSet;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorSet m_descriptorSet;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated.
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct CopyDescriptorSet + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT + : srcSet( srcSet_ ), srcBinding( srcBinding_ ), srcArrayElement( srcArrayElement_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CopyDescriptorSet ) ); return *this; } @@ -26109,6 +26327,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkCopyDescriptorSet const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -26119,6 +26338,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CopyDescriptorSet const& ) const = default; +#else bool operator==( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -26136,6 +26359,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet; @@ -26147,38 +26373,533 @@ namespace VULKAN_HPP_NAMESPACE uint32_t dstBinding = {}; uint32_t dstArrayElement = {}; uint32_t descriptorCount = {}; + }; static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#ifdef VK_USE_PLATFORM_WIN32_KHR - - struct D3D12FenceSubmitInfoKHR + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {}, - const uint64_t* pWaitSemaphoreValues_ = {}, - uint32_t signalSemaphoreValuesCount_ = {}, - const uint64_t* pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT - : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ) - , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) - , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ) - , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + using Type = CopyDescriptorSet; + }; + + struct ImageCopy2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ ) {} - VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR & operator=( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ImageCopy2KHR( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy2KHR( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR ) - offsetof( D3D12FenceSubmitInfoKHR, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageCopy2KHR & operator=( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + ImageCopy2KHR & operator=( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageCopy2KHR ) ); + return *this; + } + + ImageCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageCopy2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + ImageCopy2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + ImageCopy2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + + operator VkImageCopy2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCopy2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageCopy2KHR const& ) const = default; +#else + bool operator==( ImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + 
&& ( srcSubresource == rhs.srcSubresource ) + && ( srcOffset == rhs.srcOffset ) + && ( dstSubresource == rhs.dstSubresource ) + && ( dstOffset == rhs.dstOffset ) + && ( extent == rhs.extent ); + } + + bool operator!=( ImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + + }; + static_assert( sizeof( ImageCopy2KHR ) == sizeof( VkImageCopy2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageCopy2KHR; + }; + + struct CopyImageInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageInfo2KHR( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + CopyImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + CopyImageInfo2KHR & operator=( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyImageInfo2KHR & operator=( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CopyImageInfo2KHR ) ); + return *this; + } + + CopyImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + CopyImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + CopyImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + 
{ + dstImage = dstImage_; + return *this; + } + + CopyImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + CopyImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + CopyImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + CopyImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkCopyImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CopyImageInfo2KHR const& ) const = default; +#else + bool operator==( CopyImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcImage == rhs.srcImage ) + && ( srcImageLayout == rhs.srcImageLayout ) + && ( dstImage == rhs.dstImage ) + && ( dstImageLayout == rhs.dstImageLayout ) + && ( regionCount == rhs.regionCount ) + && ( pRegions == rhs.pRegions ); + } + + bool operator!=( CopyImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions = {}; + + }; + static_assert( sizeof( CopyImageInfo2KHR ) == sizeof( VkCopyImageInfo2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = CopyImageInfo2KHR; + }; + + struct CopyImageToBufferInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToBufferInfo2KHR( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + CopyImageToBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + CopyImageToBufferInfo2KHR & operator=( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyImageToBufferInfo2KHR & operator=( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CopyImageToBufferInfo2KHR ) ); + return *this; + } + + CopyImageToBufferInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyImageToBufferInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + CopyImageToBufferInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + CopyImageToBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + CopyImageToBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + CopyImageToBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + CopyImageToBufferInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkCopyImageToBufferInfo2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto 
operator<=>( CopyImageToBufferInfo2KHR const& ) const = default; +#else + bool operator==( CopyImageToBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcImage == rhs.srcImage ) + && ( srcImageLayout == rhs.srcImageLayout ) + && ( dstBuffer == rhs.dstBuffer ) + && ( regionCount == rhs.regionCount ) + && ( pRegions == rhs.pRegions ); + } + + bool operator!=( CopyImageToBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions = {}; + + }; + static_assert( sizeof( CopyImageToBufferInfo2KHR ) == sizeof( VkCopyImageToBufferInfo2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyImageToBufferInfo2KHR; + }; + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct CopyMemoryToAccelerationStructureInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + CopyMemoryToAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT + : src( src_ ), dst( dst_ ), mode( mode_ ) + {} + + CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( CopyMemoryToAccelerationStructureInfoKHR ) ); + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + + operator VkCopyMemoryToAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
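      // Aside (illustrative comment, not generated code): every regenerated wrapper in this header
      // follows the pattern visible here -- a const and a non-const conversion operator that
      // reinterpret_cast the C++ struct to its C counterpart (VkCopyMemoryToAccelerationStructureInfoKHR
      // in this case). That cast is only sound because of the sizeof()/std::is_standard_layout
      // static_asserts emitted after each struct definition.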
operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + + }; + static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyMemoryToAccelerationStructureInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct D3D12FenceSubmitInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(uint32_t waitSemaphoreValuesCount_ = {}, const uint64_t* pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t* pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT + : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + {} + + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {} ) + : waitSemaphoreValuesCount( static_cast( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValuesCount( static_cast( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( D3D12FenceSubmitInfoKHR ) ); return *this; } @@ -26200,6 +26921,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValuesCount = static_cast( waitSemaphoreValues_.size() ); + pWaitSemaphoreValues = waitSemaphoreValues_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t 
signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT { signalSemaphoreValuesCount = signalSemaphoreValuesCount_; @@ -26212,6 +26942,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValuesCount = static_cast( signalSemaphoreValues_.size() ); + pSignalSemaphoreValues = signalSemaphoreValues_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkD3D12FenceSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -26222,6 +26962,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( D3D12FenceSubmitInfoKHR const& ) const = default; +#else bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -26236,6 +26980,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; @@ -26244,35 +26991,45 @@ namespace VULKAN_HPP_NAMESPACE const uint64_t* pWaitSemaphoreValues = {}; uint32_t signalSemaphoreValuesCount = {}; const uint64_t* pSignalSemaphoreValues = {}; + }; static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = D3D12FenceSubmitInfoKHR; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct DebugMarkerMarkerInfoEXT { - VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = {}, - std::array const& color_ = {} ) VULKAN_HPP_NOEXCEPT - : pMarkerName( pMarkerName_ ) - , color{} - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( color, color_ ); - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT; - VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT ) - offsetof( DebugMarkerMarkerInfoEXT, pNext ) ); - return *this; - } +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT(const char* pMarkerName_ = {}, std::array const& color_ = {}) VULKAN_HPP_NOEXCEPT + : pMarkerName( pMarkerName_ ), color( color_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DebugMarkerMarkerInfoEXT ) ); return *this; } @@ -26290,10 
+27047,11 @@ namespace VULKAN_HPP_NAMESPACE DebugMarkerMarkerInfoEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT { - memcpy( color, color_.data(), 4 * sizeof( float ) ); + color = color_; return *this; } + operator VkDebugMarkerMarkerInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -26304,52 +27062,69 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DebugMarkerMarkerInfoEXT const& ) const = default; +#else bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pMarkerName == rhs.pMarkerName ) - && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 ); + && ( color == rhs.color ); } bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; const void* pNext = {}; const char* pMarkerName = {}; - float color[4] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; + }; static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DebugMarkerMarkerInfoEXT; + }; + struct DebugMarkerObjectNameInfoEXT { - VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, - uint64_t object_ = {}, - const char* pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT - : objectType( objectType_ ) - , object( object_ ) - , pObjectName( pObjectName_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, const char* pObjectName_ = {}) VULKAN_HPP_NOEXCEPT + : objectType( objectType_ ), object( object_ ), pObjectName( pObjectName_ ) {} - VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT ) - offsetof( DebugMarkerObjectNameInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) ); return *this; } @@ -26377,6 +27152,7 @@ 
namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDebugMarkerObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -26387,6 +27163,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DebugMarkerObjectNameInfoEXT const& ) const = default; +#else bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -26400,6 +27180,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; @@ -26407,38 +27190,51 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; uint64_t object = {}; const char* pObjectName = {}; + }; static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DebugMarkerObjectNameInfoEXT; + }; + struct DebugMarkerObjectTagInfoEXT { - VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, - uint64_t object_ = {}, - uint64_t tagName_ = {}, - size_t tagSize_ = {}, - const void* pTag_ = {} ) VULKAN_HPP_NOEXCEPT - : objectType( objectType_ ) - , object( object_ ) - , tagName( tagName_ ) - , tagSize( tagSize_ ) - , pTag( pTag_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void* pTag_ = {}) VULKAN_HPP_NOEXCEPT + : objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ ) {} - VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT ) - offsetof( DebugMarkerObjectTagInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) + : objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) ); return *this; } @@ -26478,6 +27274,17 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tag_.size() * sizeof(T); + pTag = tag_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkDebugMarkerObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -26488,6 +27295,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DebugMarkerObjectTagInfoEXT const& ) const = default; +#else bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -26503,6 +27314,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; @@ -26512,34 +27326,44 @@ namespace VULKAN_HPP_NAMESPACE uint64_t tagName = {}; size_t tagSize = {}; const void* pTag = {}; + }; static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DebugMarkerObjectTagInfoEXT; + }; + struct DebugReportCallbackCreateInfoEXT { - VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, - PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, - void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pfnCallback( pfnCallback_ ) - , pUserData( pUserData_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), pfnCallback( pfnCallback_ ), pUserData( pUserData_ ) {} - VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT ) - offsetof( DebugReportCallbackCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); 
+ *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) ); return *this; } @@ -26567,6 +27391,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDebugReportCallbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -26577,6 +27402,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DebugReportCallbackCreateInfoEXT const& ) const = default; +#else bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -26590,6 +27419,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; @@ -26597,34 +27429,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {}; PFN_vkDebugReportCallbackEXT pfnCallback = {}; void* pUserData = {}; + }; static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DebugReportCallbackCreateInfoEXT; + }; + struct DebugUtilsLabelEXT { - VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char* pLabelName_ = {}, - std::array const& color_ = {} ) VULKAN_HPP_NOEXCEPT - : pLabelName( pLabelName_ ) - , color{} - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( color, color_ ); - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT; - VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT ) - offsetof( DebugUtilsLabelEXT, pNext ) ); - return *this; - } +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(const char* pLabelName_ = {}, std::array const& color_ = {}) VULKAN_HPP_NOEXCEPT + : pLabelName( pLabelName_ ), color( color_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DebugUtilsLabelEXT ) ); return *this; } @@ -26642,10 +27484,11 @@ namespace VULKAN_HPP_NAMESPACE DebugUtilsLabelEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT { - memcpy( color, color_.data(), 4 * sizeof( float ) ); + color = color_; return *this; } + operator VkDebugUtilsLabelEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -26656,52 +27499,69 @@ namespace 
VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DebugUtilsLabelEXT const& ) const = default; +#else bool operator==( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pLabelName == rhs.pLabelName ) - && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 ); + && ( color == rhs.color ); } bool operator!=( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT; const void* pNext = {}; const char* pLabelName = {}; - float color[4] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; + }; static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DebugUtilsLabelEXT; + }; + struct DebugUtilsObjectNameInfoEXT { - VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, - uint64_t objectHandle_ = {}, - const char* pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT - : objectType( objectType_ ) - , objectHandle( objectHandle_ ) - , pObjectName( pObjectName_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, const char* pObjectName_ = {}) VULKAN_HPP_NOEXCEPT + : objectType( objectType_ ), objectHandle( objectHandle_ ), pObjectName( pObjectName_ ) {} - VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT ) - offsetof( DebugUtilsObjectNameInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DebugUtilsObjectNameInfoEXT ) ); return *this; } @@ -26729,6 +27589,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDebugUtilsObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -26739,6 +27600,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DebugUtilsObjectNameInfoEXT const& ) const = default; +#else bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) 
const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -26752,6 +27617,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; @@ -26759,48 +27627,50 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; uint64_t objectHandle = {}; const char* pObjectName = {}; + }; static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DebugUtilsObjectNameInfoEXT; + }; + struct DebugUtilsMessengerCallbackDataEXT { - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, - const char* pMessageIdName_ = {}, - int32_t messageIdNumber_ = {}, - const char* pMessage_ = {}, - uint32_t queueLabelCount_ = {}, - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = {}, - uint32_t cmdBufLabelCount_ = {}, - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = {}, - uint32_t objectCount_ = {}, - const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pMessageIdName( pMessageIdName_ ) - , messageIdNumber( messageIdNumber_ ) - , pMessage( pMessage_ ) - , queueLabelCount( queueLabelCount_ ) - , pQueueLabels( pQueueLabels_ ) - , cmdBufLabelCount( cmdBufLabelCount_ ) - , pCmdBufLabels( pCmdBufLabels_ ) - , objectCount( objectCount_ ) - , pObjects( pObjects_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, const char* pMessageIdName_ = {}, int32_t messageIdNumber_ = {}, const char* pMessage_ = {}, uint32_t queueLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = {}, uint32_t cmdBufLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = {}, uint32_t objectCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( queueLabelCount_ ), pQueueLabels( pQueueLabels_ ), cmdBufLabelCount( cmdBufLabelCount_ ), pCmdBufLabels( pCmdBufLabels_ ), objectCount( objectCount_ ), pObjects( pObjects_ ) {} - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT ) - offsetof( DebugUtilsMessengerCallbackDataEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if 
!defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, const char* pMessageIdName_, int32_t messageIdNumber_, const char* pMessage_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueLabels_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & cmdBufLabels_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & objects_ = {} ) + : flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( static_cast( queueLabels_.size() ) ), pQueueLabels( queueLabels_.data() ), cmdBufLabelCount( static_cast( cmdBufLabels_.size() ) ), pCmdBufLabels( cmdBufLabels_.data() ), objectCount( static_cast( objects_.size() ) ), pObjects( objects_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DebugUtilsMessengerCallbackDataEXT ) ); return *this; } @@ -26846,6 +27716,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DebugUtilsMessengerCallbackDataEXT & setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueLabels_ ) VULKAN_HPP_NOEXCEPT + { + queueLabelCount = static_cast( queueLabels_.size() ); + pQueueLabels = queueLabels_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT { cmdBufLabelCount = cmdBufLabelCount_; @@ -26858,6 +27737,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT + { + cmdBufLabelCount = static_cast( cmdBufLabels_.size() ); + pCmdBufLabels = cmdBufLabels_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT { objectCount = objectCount_; @@ -26870,6 +27758,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DebugUtilsMessengerCallbackDataEXT & setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & objects_ ) VULKAN_HPP_NOEXCEPT + { + objectCount = static_cast( objects_.size() ); + pObjects = objects_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkDebugUtilsMessengerCallbackDataEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -26880,6 +27778,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DebugUtilsMessengerCallbackDataEXT const& ) const = default; +#else bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -26900,6 +27802,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; @@ -26914,38 +27819,44 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels = {}; uint32_t objectCount = {}; const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects = {}; + }; static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DebugUtilsMessengerCallbackDataEXT; + }; + struct DebugUtilsMessengerCreateInfoEXT { - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, - void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , messageSeverity( messageSeverity_ ) - , messageType( messageType_ ) - , pfnUserCallback( pfnUserCallback_ ) - , pUserData( pUserData_ ) + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), messageSeverity( messageSeverity_ ), messageType( messageType_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ ) {} - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT ) - offsetof( DebugUtilsMessengerCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DebugUtilsMessengerCreateInfoEXT ) ); return *this; } @@ -26985,6 +27896,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDebugUtilsMessengerCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -26995,6 +27907,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( 
DebugUtilsMessengerCreateInfoEXT const& ) const = default; +#else bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -27010,6 +27926,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; @@ -27019,38 +27938,51 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {}; PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {}; void* pUserData = {}; + }; static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DebugUtilsMessengerCreateInfoEXT; + }; + struct DebugUtilsObjectTagInfoEXT { - VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, - uint64_t objectHandle_ = {}, - uint64_t tagName_ = {}, - size_t tagSize_ = {}, - const void* pTag_ = {} ) VULKAN_HPP_NOEXCEPT - : objectType( objectType_ ) - , objectHandle( objectHandle_ ) - , tagName( tagName_ ) - , tagSize( tagSize_ ) - , pTag( pTag_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void* pTag_ = {}) VULKAN_HPP_NOEXCEPT + : objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ ) {} - VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT ) - offsetof( DebugUtilsObjectTagInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) + : objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DebugUtilsObjectTagInfoEXT ) ); return *this; } @@ -27090,6 +28022,17 @@ namespace 
VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tag_.size() * sizeof(T); + pTag = tag_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkDebugUtilsObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -27100,6 +28043,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DebugUtilsObjectTagInfoEXT const& ) const = default; +#else bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -27115,6 +28062,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; @@ -27124,30 +28074,44 @@ namespace VULKAN_HPP_NAMESPACE uint64_t tagName = {}; size_t tagSize = {}; const void* pTag = {}; + }; static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DebugUtilsObjectTagInfoEXT; + }; + struct DedicatedAllocationBufferCreateInfoNV { - VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT - : dedicatedAllocation( dedicatedAllocation_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT + : dedicatedAllocation( dedicatedAllocation_ ) {} - VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) - offsetof( DedicatedAllocationBufferCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) ); return *this; } @@ -27163,6 +28127,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDedicatedAllocationBufferCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); 
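A minimal usage sketch of the regenerated struct wrappers (illustrative only; it assumes <vulkan/vulkan.hpp> is included with its default vk namespace, and `debugCallback` stands in for a PFN_vkDebugUtilsMessengerCallbackEXT defined elsewhere). The setters chain because each returns a reference to the struct, the const sType member is pre-filled by the wrapper, and the conversion operator hands the object straight to C entry points:

  // Sketch: fill a debug-utils messenger create info with the fluent setters.
  vk::DebugUtilsMessengerCreateInfoEXT createInfo;
  createInfo.setMessageSeverity( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning
                                 | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError )
            .setMessageType( vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation )
            .setPfnUserCallback( debugCallback );   // sType is already eDebugUtilsMessengerCreateInfoEXT

  // The wrapper converts implicitly to the C struct expected by the raw Vulkan API.
  const VkDebugUtilsMessengerCreateInfoEXT & cCreateInfo = createInfo;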
@@ -27173,6 +28138,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DedicatedAllocationBufferCreateInfoNV const& ) const = default; +#else bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -27184,35 +28153,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; const void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; + }; static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DedicatedAllocationBufferCreateInfoNV; + }; + struct DedicatedAllocationImageCreateInfoNV { - VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT - : dedicatedAllocation( dedicatedAllocation_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT + : dedicatedAllocation( dedicatedAllocation_ ) {} - VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV ) - offsetof( DedicatedAllocationImageCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) ); return *this; } @@ -27228,6 +28214,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDedicatedAllocationImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -27238,6 +28225,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DedicatedAllocationImageCreateInfoNV const& ) const = default; +#else bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -27249,37 +28240,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + 
public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; const void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; + }; static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DedicatedAllocationImageCreateInfoNV; + }; + struct DedicatedAllocationMemoryAllocateInfoNV { - VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - , buffer( buffer_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT + : image( image_ ), buffer( buffer_ ) {} - VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV & operator=( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV ) - offsetof( DedicatedAllocationMemoryAllocateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) ); return *this; } @@ -27301,6 +28307,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -27311,6 +28318,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const& ) const = default; +#else bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -27323,34 +28334,237 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; const void* pNext = {}; VULKAN_HPP_NAMESPACE::Image image = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and 
wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DedicatedAllocationMemoryAllocateInfoNV; + }; + +#ifdef VK_ENABLE_BETA_EXTENSIONS + class DeferredOperationKHR + { + public: + using CType = VkDeferredOperationKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR DeferredOperationKHR() VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR( deferredOperationKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DeferredOperationKHR & operator=(VkDeferredOperationKHR deferredOperationKHR) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = deferredOperationKHR; + return *this; + } +#endif + + DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeferredOperationKHR const& ) const = default; +#else + bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == rhs.m_deferredOperationKHR; + } + + bool operator!=(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != rhs.m_deferredOperationKHR; + } + + bool operator<(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR < rhs.m_deferredOperationKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == VK_NULL_HANDLE; + } + + private: + VkDeferredOperationKHR m_deferredOperationKHR; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + }; + + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct DeferredOperationInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeferredOperationInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeferredOperationInfoKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ = {}) VULKAN_HPP_NOEXCEPT + : operationHandle( operationHandle_ ) + {} + + VULKAN_HPP_CONSTEXPR DeferredOperationInfoKHR( DeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeferredOperationInfoKHR( VkDeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeferredOperationInfoKHR & operator=( VkDeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeferredOperationInfoKHR & operator=( DeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeferredOperationInfoKHR ) ); + return *this; + } + + DeferredOperationInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeferredOperationInfoKHR & setOperationHandle( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ ) VULKAN_HPP_NOEXCEPT + { + operationHandle = operationHandle_; + return *this; + } + + + operator VkDeferredOperationInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeferredOperationInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeferredOperationInfoKHR const& ) const = default; +#else + bool operator==( DeferredOperationInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( operationHandle == rhs.operationHandle ); + } + + bool operator!=( DeferredOperationInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeferredOperationInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle = {}; + + }; + static_assert( sizeof( DeferredOperationInfoKHR ) == sizeof( VkDeferredOperationInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeferredOperationInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct DescriptorBufferInfo { - VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , offset( offset_ ) - , range( range_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorBufferInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ), offset( offset_ ), range( range_ ) {} + VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorBufferInfo ) ); return *this; } @@ -27372,6 +28586,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDescriptorBufferInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -27382,6 +28597,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorBufferInfo const& ) const = default; +#else bool operator==( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( buffer == rhs.buffer ) @@ -27393,33 +28612,245 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + }; static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + class Sampler + { + public: + using CType = VkSampler; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + + public: + VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT + : m_sampler(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_sampler(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT + : m_sampler( sampler ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT + { + m_sampler = sampler; + return *this; + } +#endif + + Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_sampler = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Sampler const& ) const = default; +#else + bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_sampler == rhs.m_sampler; + } + + bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_sampler != rhs.m_sampler; + } + + bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_sampler < rhs.m_sampler; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT + { + return m_sampler; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_sampler != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_sampler == VK_NULL_HANDLE; + } + + private: + VkSampler m_sampler; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class ImageView + { + public: + using CType = VkImageView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; + + public: + VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT + : m_imageView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_imageView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT + : m_imageView( imageView ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT + { + m_imageView = imageView; + return *this; + } +#endif + + ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_imageView = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageView const& ) const = default; +#else + bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView == rhs.m_imageView; + } + + bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView != rhs.m_imageView; + } + + bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView < rhs.m_imageView; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT + { + return m_imageView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_imageView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_imageView == VK_NULL_HANDLE; + } + + private: + VkImageView m_imageView; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + struct DescriptorImageInfo { - VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, - VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : sampler( sampler_ ) - , imageView( imageView_ ) - , imageLayout( imageLayout_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorImageInfo(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT + : sampler( sampler_ ), imageView( imageView_ ), imageLayout( imageLayout_ ) {} + VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorImageInfo ) ); return *this; } @@ -27441,6 +28872,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDescriptorImageInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -27451,6 +28883,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorImageInfo const& ) const = default; +#else bool operator==( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sampler == rhs.sampler ) @@ -27462,31 +28898,45 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::Sampler sampler = {}; VULKAN_HPP_NAMESPACE::ImageView imageView = {}; VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
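/* Editorial note -- illustrative sketch only, not part of the generated header or of this patch.
   DescriptorImageInfo and DescriptorBufferInfo stay plain value structs with constexpr
   constructors (when VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is not defined), so descriptor data can be
   built in place. A minimal example, with sampler, view and buffer assumed to be valid handles
   created elsewhere:

       vk::DescriptorImageInfo  imageInfo( sampler, view, vk::ImageLayout::eShaderReadOnlyOptimal );
       vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );

   Both structs keep their implicit conversions to the corresponding C structs, so they can still
   be handed to code expecting VkDescriptorImageInfo / VkDescriptorBufferInfo. */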
); struct DescriptorPoolSize { - VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , descriptorCount( descriptorCount_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorPoolSize(VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT + : type( type_ ), descriptorCount( descriptorCount_ ) {} + VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorPoolSize ) ); return *this; } @@ -27502,6 +28952,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDescriptorPoolSize const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -27512,6 +28963,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorPoolSize const& ) const = default; +#else bool operator==( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( type == rhs.type ) @@ -27522,40 +28977,51 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; uint32_t descriptorCount = {}; + }; static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
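/* Editorial note -- illustrative sketch only, not part of the generated header or of this patch.
   DescriptorPoolSize entries feed directly into the DescriptorPoolCreateInfo changes in the next
   hunk, which add an ArrayProxyNoTemporaries constructor and setter so poolSizeCount / pPoolSizes
   no longer need to be filled by hand. A hedged sketch, assuming enhanced mode and the default
   exception configuration, with device a valid vk::Device:

       std::array<vk::DescriptorPoolSize, 2> poolSizes = {
         vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 16 ),
         vk::DescriptorPoolSize( vk::DescriptorType::eCombinedImageSampler, 16 )
       };
       vk::DescriptorPoolCreateInfo poolInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
                                              32,          // maxSets
                                              poolSizes ); // ArrayProxyNoTemporaries overload
       vk::DescriptorPool pool = device.createDescriptorPool( poolInfo );
*/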
); struct DescriptorPoolCreateInfo { - VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, - uint32_t maxSets_ = {}, - uint32_t poolSizeCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , maxSets( maxSets_ ) - , poolSizeCount( poolSizeCount_ ) - , pPoolSizes( pPoolSizes_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, uint32_t maxSets_ = {}, uint32_t poolSizeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( poolSizeCount_ ), pPoolSizes( pPoolSizes_ ) {} - VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo ) - offsetof( DescriptorPoolCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, uint32_t maxSets_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_ ) + : flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorPoolCreateInfo ) ); return *this; } @@ -27589,6 +29055,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorPoolCreateInfo & setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_ ) VULKAN_HPP_NOEXCEPT + { + poolSizeCount = static_cast( poolSizes_.size() ); + pPoolSizes = poolSizes_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkDescriptorPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -27599,6 +29075,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorPoolCreateInfo const& ) const = default; +#else bool operator==( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -27613,6 +29093,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo; @@ -27621,30 +29104,44 @@ namespace 
VULKAN_HPP_NAMESPACE uint32_t maxSets = {}; uint32_t poolSizeCount = {}; const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes = {}; + }; static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DescriptorPoolCreateInfo; + }; + struct DescriptorPoolInlineUniformBlockCreateInfoEXT { - VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = {} ) VULKAN_HPP_NOEXCEPT - : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT(uint32_t maxInlineUniformBlockBindings_ = {}) VULKAN_HPP_NOEXCEPT + : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ ) {} - VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT ) - offsetof( DescriptorPoolInlineUniformBlockCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DescriptorPoolInlineUniformBlockCreateInfoEXT& operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) ); return *this; } @@ -27660,6 +29157,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -27670,6 +29168,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const& ) const = default; +#else bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -27681,39 +29183,258 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT; const void* pNext = {}; uint32_t maxInlineUniformBlockBindings = {}; + }; static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct 
and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct DescriptorSetAllocateInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, - uint32_t descriptorSetCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {} ) VULKAN_HPP_NOEXCEPT - : descriptorPool( descriptorPool_ ) - , descriptorSetCount( descriptorSetCount_ ) - , pSetLayouts( pSetLayouts_ ) + using Type = DescriptorPoolInlineUniformBlockCreateInfoEXT; + }; + + class DescriptorPool + { + public: + using CType = VkDescriptorPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + + public: + VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT + : m_descriptorPool(VK_NULL_HANDLE) {} - VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_descriptorPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT + : m_descriptorPool( descriptorPool ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) - offsetof( DescriptorSetAllocateInfo, pNext ) ); + m_descriptorPool = descriptorPool; return *this; } +#endif + + DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorPool const& ) const = default; +#else + bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool == rhs.m_descriptorPool; + } + + bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool != rhs.m_descriptorPool; + } + + bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool < rhs.m_descriptorPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool == VK_NULL_HANDLE; + } + + private: + VkDescriptorPool m_descriptorPool; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DescriptorSetLayout + { + public: + using CType = VkDescriptorSetLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + + public: + VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT + : m_descriptorSetLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_descriptorSetLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + : m_descriptorSetLayout( descriptorSetLayout ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = descriptorSetLayout; + return *this; + } +#endif + + DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorSetLayout const& ) const = default; +#else + bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout == rhs.m_descriptorSetLayout; + } + + bool operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout != rhs.m_descriptorSetLayout; + } + + bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout < rhs.m_descriptorSetLayout; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout == VK_NULL_HANDLE; + } + + private: + VkDescriptorSetLayout m_descriptorSetLayout; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct DescriptorSetAllocateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, uint32_t descriptorSetCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}) VULKAN_HPP_NOEXCEPT + : descriptorPool( descriptorPool_ ), descriptorSetCount( descriptorSetCount_ ), pSetLayouts( pSetLayouts_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) + : descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorSetAllocateInfo ) ); return *this; } @@ -27741,6 +29462,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorSetAllocateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkDescriptorSetAllocateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -27751,6 +29482,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorSetAllocateInfo const& ) const = default; +#else bool operator==( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -27764,6 +29499,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo; @@ -27771,32 +29509,49 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {}; uint32_t descriptorSetCount = {}; const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {}; + }; static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( 
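/* Editorial note -- illustrative sketch only, not part of the generated header or of this patch.
   DescriptorSetAllocateInfo gains an ArrayProxyNoTemporaries constructor and a setSetLayouts()
   helper, so descriptorSetCount is derived from the layout list instead of being set separately.
   A minimal allocation sketch (enhanced mode, default exception handling), with device, pool and
   layout assumed to be valid handles:

       std::vector<vk::DescriptorSetLayout> layouts( 3, layout );
       vk::DescriptorSetAllocateInfo allocInfo( pool, layouts );  // count taken from layouts.size()
       std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
*/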
VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DescriptorSetAllocateInfo; + }; + struct DescriptorSetLayoutBinding { - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - uint32_t descriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, - const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT - : binding( binding_ ) - , descriptorType( descriptorType_ ) - , descriptorCount( descriptorCount_ ) - , stageFlags( stageFlags_ ) - , pImmutableSamplers( pImmutableSamplers_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = {}) VULKAN_HPP_NOEXCEPT + : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( descriptorCount_ ), stageFlags( stageFlags_ ), pImmutableSamplers( pImmutableSamplers_ ) {} + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorSetLayoutBinding( uint32_t binding_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) + : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( static_cast( immutableSamplers_.size() ) ), stageFlags( stageFlags_ ), pImmutableSamplers( immutableSamplers_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorSetLayoutBinding ) ); return *this; } @@ -27830,6 +29585,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorSetLayoutBinding & setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( immutableSamplers_.size() ); + pImmutableSamplers = immutableSamplers_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkDescriptorSetLayoutBinding const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -27840,6 +29605,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorSetLayoutBinding const& ) const = default; +#else bool operator==( 
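/* Editorial note -- illustrative sketch only, not part of the generated header or of this patch.
   DescriptorSetLayoutBinding keeps the classic five-argument constructor and adds an overload that
   takes the immutable samplers as an ArrayProxyNoTemporaries, which also fills descriptorCount.
   For the common case without immutable samplers the explicit count is still used:

       vk::DescriptorSetLayoutBinding uboBinding(
         0,                                   // binding
         vk::DescriptorType::eUniformBuffer,
         1,                                   // descriptorCount
         vk::ShaderStageFlagBits::eVertex );
*/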
DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( binding == rhs.binding ) @@ -27853,6 +29622,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t binding = {}; @@ -27860,32 +29632,44 @@ namespace VULKAN_HPP_NAMESPACE uint32_t descriptorCount = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers = {}; + }; static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct DescriptorSetLayoutBindingFlagsCreateInfo { - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ = {} ) VULKAN_HPP_NOEXCEPT - : bindingCount( bindingCount_ ) - , pBindingFlags( pBindingFlags_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ = {}) VULKAN_HPP_NOEXCEPT + : bindingCount( bindingCount_ ), pBindingFlags( pBindingFlags_ ) {} - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo ) - offsetof( DescriptorSetLayoutBindingFlagsCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DescriptorSetLayoutBindingFlagsCreateInfo& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorSetLayoutBindingFlagsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindingFlags_ ) + : bindingCount( static_cast( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) ); return *this; } @@ -27907,6 +29691,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = static_cast( bindingFlags_.size() ); + pBindingFlags = bindingFlags_.data(); + return *this; + } +#endif // 
!defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkDescriptorSetLayoutBindingFlagsCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -27917,6 +29711,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const& ) const = default; +#else bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -27929,40 +29727,60 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; const void* pNext = {}; uint32_t bindingCount = {}; const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags = {}; + }; static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DescriptorSetLayoutBindingFlagsCreateInfo; + }; + using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; + struct DescriptorSetLayoutCreateInfo { - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, - uint32_t bindingCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , bindingCount( bindingCount_ ) - , pBindings( pBindings_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), bindingCount( bindingCount_ ), pBindings( pBindings_ ) {} - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo ) - offsetof( DescriptorSetLayoutCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindings_ ) + : flags( flags_ ), bindingCount( static_cast( bindings_.size() ) ), pBindings( bindings_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + 
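/* Editorial note -- illustrative sketch only, not part of the generated header or of this patch.
   DescriptorSetLayoutCreateInfo follows the same pattern: the new ArrayProxyNoTemporaries
   constructor and setBindings() derive bindingCount from the binding list. Continuing the
   uboBinding sketch above, with device assumed to be a valid vk::Device:

       std::array<vk::DescriptorSetLayoutBinding, 1> bindings = { uboBinding };
       vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, bindings );
       vk::DescriptorSetLayout layout = device.createDescriptorSetLayout( layoutInfo );
*/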
+ DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorSetLayoutCreateInfo ) ); return *this; } @@ -27990,6 +29808,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorSetLayoutCreateInfo & setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindings_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = static_cast( bindings_.size() ); + pBindings = bindings_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkDescriptorSetLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -28000,6 +29828,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorSetLayoutCreateInfo const& ) const = default; +#else bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -28013,6 +29845,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; @@ -28020,33 +29855,48 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {}; uint32_t bindingCount = {}; const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings = {}; + }; static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DescriptorSetLayoutCreateInfo; + }; + struct DescriptorSetLayoutSupport { - DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {} ) VULKAN_HPP_NOEXCEPT - : supported( supported_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport(VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}) VULKAN_HPP_NOEXCEPT + : supported( supported_ ) {} - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport ) - offsetof( DescriptorSetLayoutSupport, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DescriptorSetLayoutSupport& operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorSetLayoutSupport ) ); + return *this; + } + + operator VkDescriptorSetLayoutSupport const&() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); @@ -28057,6 +29907,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorSetLayoutSupport const& ) const = default; +#else bool operator==( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -28068,37 +29922,59 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 supported = {}; + }; static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DescriptorSetLayoutSupport; + }; + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; + struct DescriptorSetVariableDescriptorCountAllocateInfo { - VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, - const uint32_t* pDescriptorCounts_ = {} ) VULKAN_HPP_NOEXCEPT - : descriptorSetCount( descriptorSetCount_ ) - , pDescriptorCounts( pDescriptorCounts_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(uint32_t descriptorSetCount_ = {}, const uint32_t* pDescriptorCounts_ = {}) VULKAN_HPP_NOEXCEPT + : descriptorSetCount( descriptorSetCount_ ), pDescriptorCounts( pDescriptorCounts_ ) {} - VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo ) - offsetof( DescriptorSetVariableDescriptorCountAllocateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DescriptorSetVariableDescriptorCountAllocateInfo& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_ ) + : descriptorSetCount( static_cast( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( 
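/* Editorial note -- illustrative sketch only, not part of the generated header or of this patch.
   DescriptorSetVariableDescriptorCountAllocateInfo is a pNext extension of
   DescriptorSetAllocateInfo; its new ArrayProxyNoTemporaries constructor takes one count per set
   being allocated. A hedged sketch of chaining it by hand, reusing the allocInfo from the
   allocation sketch above:

       std::array<uint32_t, 1> variableCounts = { 128 };
       vk::DescriptorSetVariableDescriptorCountAllocateInfo variableInfo( variableCounts );
       allocInfo.setPNext( &variableInfo );
*/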
DescriptorSetVariableDescriptorCountAllocateInfo ) ); return *this; } @@ -28120,6 +29996,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( descriptorCounts_.size() ); + pDescriptorCounts = descriptorCounts_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkDescriptorSetVariableDescriptorCountAllocateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -28130,6 +30016,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const& ) const = default; +#else bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -28142,39 +30032,58 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; const void* pNext = {}; uint32_t descriptorSetCount = {}; const uint32_t* pDescriptorCounts = {}; + }; static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DescriptorSetVariableDescriptorCountAllocateInfo; + }; + using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; + struct DescriptorSetVariableDescriptorCountLayoutSupport { - DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT - : maxVariableDescriptorCount( maxVariableDescriptorCount_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(uint32_t maxVariableDescriptorCount_ = {}) VULKAN_HPP_NOEXCEPT + : maxVariableDescriptorCount( maxVariableDescriptorCount_ ) {} - VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport ) - offsetof( DescriptorSetVariableDescriptorCountLayoutSupport, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DescriptorSetVariableDescriptorCountLayoutSupport& operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountLayoutSupport & 
operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + DescriptorSetVariableDescriptorCountLayoutSupport & operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) ); + return *this; + } + + operator VkDescriptorSetVariableDescriptorCountLayoutSupport const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -28185,6 +30094,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const& ) const = default; +#else bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -28196,39 +30109,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; void* pNext = {}; uint32_t maxVariableDescriptorCount = {}; + }; static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DescriptorSetVariableDescriptorCountLayoutSupport; + }; + using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; + struct DescriptorUpdateTemplateEntry { - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = {}, - uint32_t dstArrayElement_ = {}, - uint32_t descriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - size_t offset_ = {}, - size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT - : dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - , descriptorCount( descriptorCount_ ) - , descriptorType( descriptorType_ ) - , offset( offset_ ) - , stride( stride_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, size_t offset_ = {}, size_t stride_ = {}) VULKAN_HPP_NOEXCEPT + : dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), offset( offset_ ), stride( stride_ ) {} + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DescriptorUpdateTemplateEntry& operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorUpdateTemplateEntry & operator=( 
DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorUpdateTemplateEntry ) ); return *this; } @@ -28268,6 +30194,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDescriptorUpdateTemplateEntry const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -28278,6 +30205,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorUpdateTemplateEntry const& ) const = default; +#else bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( dstBinding == rhs.dstBinding ) @@ -28292,6 +30223,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t dstBinding = {}; @@ -28300,44 +30234,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; size_t offset = {}; size_t stride = {}; + }; static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; struct DescriptorUpdateTemplateCreateInfo { - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, - uint32_t descriptorUpdateEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, - VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - uint32_t set_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ) - , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ) - , templateType( templateType_ ) - , descriptorSetLayout( descriptorSetLayout_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , pipelineLayout( pipelineLayout_ ) - , set( set_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, uint32_t descriptorUpdateEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = {}, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ), pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( 
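/* Editorial note -- illustrative sketch only, not part of the generated header or of this patch.
   DescriptorUpdateTemplateCreateInfo receives the same ArrayProxyNoTemporaries treatment for its
   update entries in this hunk. A sketch describing one binding updated from host data that holds a
   vk::DescriptorBufferInfo at offset 0 (names are illustrative; layout is the DescriptorSetLayout
   from the sketch above):

       vk::DescriptorUpdateTemplateEntry entry(
         0, 0, 1,                                // binding, arrayElement, descriptorCount
         vk::DescriptorType::eUniformBuffer,
         0,                                      // offset into the host data
         sizeof( vk::DescriptorBufferInfo ) );   // stride
       vk::DescriptorUpdateTemplateCreateInfo templateInfo(
         {}, entry,                              // single-entry ArrayProxy
         vk::DescriptorUpdateTemplateType::eDescriptorSet,
         layout );
*/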
pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ ) {} - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo ) - offsetof( DescriptorUpdateTemplateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorUpdateEntries_, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {} ) + : flags( flags_ ), descriptorUpdateEntryCount( static_cast( descriptorUpdateEntries_.size() ) ), pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DescriptorUpdateTemplateCreateInfo ) ); return *this; } @@ -28365,6 +30300,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT + { + descriptorUpdateEntryCount = static_cast( descriptorUpdateEntries_.size() ); + pDescriptorUpdateEntries = descriptorUpdateEntries_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT { templateType = templateType_; @@ -28395,6 +30339,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDescriptorUpdateTemplateCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -28405,6 +30350,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorUpdateTemplateCreateInfo const& ) const = default; +#else bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -28423,6 
+30372,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; @@ -28435,36 +30387,51 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; uint32_t set = {}; + }; static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DescriptorUpdateTemplateCreateInfo; + }; + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; + struct DeviceQueueCreateInfo { - VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, - uint32_t queueFamilyIndex_ = {}, - uint32_t queueCount_ = {}, - const float* pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , queueCount( queueCount_ ) - , pQueuePriorities( pQueuePriorities_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueCount_ = {}, const float* pQueuePriorities_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( queueCount_ ), pQueuePriorities( pQueuePriorities_ ) {} - VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo ) - offsetof( DeviceQueueCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_ ) + : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( static_cast( queuePriorities_.size() ) ), pQueuePriorities( queuePriorities_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceQueueCreateInfo ) ); return *this; } @@ -28498,6 +30465,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT + 
{ + queueCount = static_cast( queuePriorities_.size() ); + pQueuePriorities = queuePriorities_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkDeviceQueueCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -28508,6 +30485,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceQueueCreateInfo const& ) const = default; +#else bool operator==( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -28522,6 +30503,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo; @@ -28530,132 +30514,43 @@ namespace VULKAN_HPP_NAMESPACE uint32_t queueFamilyIndex = {}; uint32_t queueCount = {}; const float* pQueuePriorities = {}; + }; static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DeviceQueueCreateInfo; + }; + struct PhysicalDeviceFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 
shaderStorageImageArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT - : robustBufferAccess( robustBufferAccess_ ) - , fullDrawIndexUint32( fullDrawIndexUint32_ ) - , imageCubeArray( imageCubeArray_ ) - , independentBlend( independentBlend_ ) - , geometryShader( geometryShader_ ) - , tessellationShader( tessellationShader_ ) - , sampleRateShading( sampleRateShading_ ) - , dualSrcBlend( dualSrcBlend_ ) - , logicOp( logicOp_ ) - , multiDrawIndirect( multiDrawIndirect_ ) - , drawIndirectFirstInstance( drawIndirectFirstInstance_ ) - , depthClamp( depthClamp_ ) - , depthBiasClamp( depthBiasClamp_ ) - , fillModeNonSolid( fillModeNonSolid_ ) - , depthBounds( depthBounds_ ) - , wideLines( wideLines_ ) - , largePoints( largePoints_ ) - , alphaToOne( alphaToOne_ ) - , multiViewport( multiViewport_ ) - , samplerAnisotropy( samplerAnisotropy_ ) - , textureCompressionETC2( textureCompressionETC2_ ) - , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ) - , textureCompressionBC( textureCompressionBC_ ) - , occlusionQueryPrecise( occlusionQueryPrecise_ ) - , pipelineStatisticsQuery( pipelineStatisticsQuery_ ) - , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ) - , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ) - , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ) - , shaderImageGatherExtended( shaderImageGatherExtended_ ) - , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ) - , shaderStorageImageMultisample( shaderStorageImageMultisample_ ) - , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ) - , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ) - , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ) - , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ) - , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ) - , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ) - , shaderClipDistance( shaderClipDistance_ ) - , shaderCullDistance( shaderCullDistance_ ) - , shaderFloat64( shaderFloat64_ ) - , shaderInt64( shaderInt64_ ) - , shaderInt16( shaderInt16_ ) - , shaderResourceResidency( shaderResourceResidency_ ) - , shaderResourceMinLod( shaderResourceMinLod_ ) - , sparseBinding( sparseBinding_ ) - , sparseResidencyBuffer( sparseResidencyBuffer_ ) - , sparseResidencyImage2D( sparseResidencyImage2D_ ) - , 
sparseResidencyImage3D( sparseResidencyImage3D_ ) - , sparseResidency2Samples( sparseResidency2Samples_ ) - , sparseResidency4Samples( sparseResidency4Samples_ ) - , sparseResidency8Samples( sparseResidency8Samples_ ) - , sparseResidency16Samples( sparseResidency16Samples_ ) - , sparseResidencyAliased( sparseResidencyAliased_ ) - , variableMultisampleRate( variableMultisampleRate_ ) - , inheritedQueries( inheritedQueries_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, 
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {}) VULKAN_HPP_NOEXCEPT + : robustBufferAccess( robustBufferAccess_ ), fullDrawIndexUint32( fullDrawIndexUint32_ ), imageCubeArray( imageCubeArray_ ), independentBlend( independentBlend_ ), geometryShader( geometryShader_ ), tessellationShader( tessellationShader_ ), sampleRateShading( sampleRateShading_ ), dualSrcBlend( dualSrcBlend_ ), logicOp( logicOp_ ), multiDrawIndirect( multiDrawIndirect_ ), drawIndirectFirstInstance( drawIndirectFirstInstance_ ), depthClamp( depthClamp_ ), depthBiasClamp( depthBiasClamp_ ), fillModeNonSolid( fillModeNonSolid_ ), depthBounds( depthBounds_ ), wideLines( wideLines_ ), largePoints( largePoints_ ), alphaToOne( alphaToOne_ ), multiViewport( multiViewport_ ), samplerAnisotropy( samplerAnisotropy_ ), textureCompressionETC2( textureCompressionETC2_ ), textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ), textureCompressionBC( textureCompressionBC_ ), occlusionQueryPrecise( occlusionQueryPrecise_ ), pipelineStatisticsQuery( pipelineStatisticsQuery_ ), vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ), fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ), shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ), shaderImageGatherExtended( shaderImageGatherExtended_ ), shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ), shaderStorageImageMultisample( shaderStorageImageMultisample_ ), shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ), shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ), shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ), shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ), shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ), shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ), shaderClipDistance( shaderClipDistance_ ), shaderCullDistance( shaderCullDistance_ ), shaderFloat64( shaderFloat64_ ), shaderInt64( shaderInt64_ ), shaderInt16( shaderInt16_ ), shaderResourceResidency( shaderResourceResidency_ ), shaderResourceMinLod( shaderResourceMinLod_ ), sparseBinding( sparseBinding_ ), sparseResidencyBuffer( sparseResidencyBuffer_ ), sparseResidencyImage2D( sparseResidencyImage2D_ ), sparseResidencyImage3D( sparseResidencyImage3D_ ), sparseResidency2Samples( sparseResidency2Samples_ ), sparseResidency4Samples( sparseResidency4Samples_ ), sparseResidency8Samples( sparseResidency8Samples_ ), sparseResidency16Samples( sparseResidency16Samples_ ), sparseResidencyAliased( sparseResidencyAliased_ ), variableMultisampleRate( variableMultisampleRate_ ), inheritedQueries( inheritedQueries_ ) {} + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( 
static_cast( this ), &rhs, sizeof( PhysicalDeviceFeatures ) ); return *this; } @@ -28989,6 +30884,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -28999,6 +30895,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( robustBufferAccess == rhs.robustBufferAccess ) @@ -29062,6 +30962,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; @@ -29119,44 +31022,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {}; VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {}; VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {}; + }; static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct DeviceCreateInfo { - VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, - uint32_t queueCreateInfoCount_ = {}, - const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {}, - uint32_t enabledLayerCount_ = {}, - const char* const* ppEnabledLayerNames_ = {}, - uint32_t enabledExtensionCount_ = {}, - const char* const* ppEnabledExtensionNames_ = {}, - const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , queueCreateInfoCount( queueCreateInfoCount_ ) - , pQueueCreateInfos( pQueueCreateInfos_ ) - , enabledLayerCount( enabledLayerCount_ ) - , ppEnabledLayerNames( ppEnabledLayerNames_ ) - , enabledExtensionCount( enabledExtensionCount_ ) - , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) - , pEnabledFeatures( pEnabledFeatures_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceCreateInfo(VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, uint32_t queueCreateInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {}, uint32_t enabledLayerCount_ = {}, const char* const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char* const * ppEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), queueCreateInfoCount( queueCreateInfoCount_ ), pQueueCreateInfos( pQueueCreateInfos_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ ), pEnabledFeatures( pEnabledFeatures_ ) {} - VULKAN_HPP_NAMESPACE::DeviceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) - offsetof( DeviceCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { 
*this = rhs; } - DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueCreateInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} ) + : flags( flags_ ), queueCreateInfoCount( static_cast( queueCreateInfos_.size() ) ), pQueueCreateInfos( queueCreateInfos_.data() ), enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() ), pEnabledFeatures( pEnabledFeatures_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceCreateInfo ) ); return *this; } @@ -29184,36 +31087,64 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceCreateInfo & setQueueCreateInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + { + queueCreateInfoCount = static_cast( queueCreateInfos_.size() ); + pQueueCreateInfos = queueCreateInfos_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT { enabledLayerCount = enabledLayerCount_; return *this; } - DeviceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & setPpEnabledLayerNames( const char* const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT { ppEnabledLayerNames = ppEnabledLayerNames_; return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); + ppEnabledLayerNames = pEnabledLayerNames_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT { enabledExtensionCount = enabledExtensionCount_; return *this; } - DeviceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & setPpEnabledExtensionNames( const char* const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT { ppEnabledExtensionNames = ppEnabledExtensionNames_; return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); + ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + 
DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT { pEnabledFeatures = pEnabledFeatures_; return *this; } + operator VkDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -29224,6 +31155,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceCreateInfo const& ) const = default; +#else bool operator==( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -29242,6 +31177,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo; @@ -29250,34 +31188,135 @@ namespace VULKAN_HPP_NAMESPACE uint32_t queueCreateInfoCount = {}; const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos = {}; uint32_t enabledLayerCount = {}; - const char* const* ppEnabledLayerNames = {}; + const char* const * ppEnabledLayerNames = {}; uint32_t enabledExtensionCount = {}; - const char* const* ppEnabledExtensionNames = {}; + const char* const * ppEnabledExtensionNames = {}; const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures = {}; + }; static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct DeviceEventInfoEXT + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT - : deviceEvent( deviceEvent_ ) + using Type = DeviceCreateInfo; + }; + + struct DeviceDiagnosticsConfigCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) {} - VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT ) - offsetof( DeviceEventInfoEXT, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceDiagnosticsConfigCreateInfoNV ) ); + return *this; + } + + DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags 
= flags_; + return *this; + } + + + operator VkDeviceDiagnosticsConfigCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV*>( this ); + } + + operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV*>( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const& ) const = default; +#else + bool operator==( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {}; + + }; + static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DeviceDiagnosticsConfigCreateInfoNV>::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV> + { + using Type = DeviceDiagnosticsConfigCreateInfoNV; + }; + + struct DeviceEventInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT(VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug) VULKAN_HPP_NOEXCEPT + : deviceEvent( deviceEvent_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>(&rhs); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs ); + return *this; + } + + DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast<void*>( this ), &rhs, sizeof( DeviceEventInfoEXT ) ); return *this; } @@ -29293,6 +31332,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this ); @@ -29303,6 +31343,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast<VkDeviceEventInfoEXT*>( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceEventInfoEXT const& ) const = default; +#else bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -29314,207 +31358,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT; const void* pNext = {}; VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug; + }; static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout<DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" 
); - struct DeviceGeneratedCommandsFeaturesNVX + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsFeaturesNVX( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ = {} ) VULKAN_HPP_NOEXCEPT - : computeBindingPointSupport( computeBindingPointSupport_ ) - {} - - VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX & operator=( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX ) - offsetof( DeviceGeneratedCommandsFeaturesNVX, pNext ) ); - return *this; - } - - DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceGeneratedCommandsFeaturesNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceGeneratedCommandsFeaturesNVX & setComputeBindingPointSupport( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ ) VULKAN_HPP_NOEXCEPT - { - computeBindingPointSupport = computeBindingPointSupport_; - return *this; - } - - operator VkDeviceGeneratedCommandsFeaturesNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGeneratedCommandsFeaturesNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( computeBindingPointSupport == rhs.computeBindingPointSupport ); - } - - bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport = {}; + using Type = DeviceEventInfoEXT; }; - static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DeviceGeneratedCommandsLimitsNVX - { - VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = {}, - uint32_t maxObjectEntryCounts_ = {}, - uint32_t minSequenceCountBufferOffsetAlignment_ = {}, - uint32_t minSequenceIndexBufferOffsetAlignment_ = {}, - uint32_t minCommandsTokenBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT - : maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ ) - , maxObjectEntryCounts( maxObjectEntryCounts_ ) - , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ ) - , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ ) - , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ ) - {} - - VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX & operator=( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX ) - offsetof( DeviceGeneratedCommandsLimitsNVX, pNext ) ); - return *this; - } - - DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ ) VULKAN_HPP_NOEXCEPT - { - maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ ) VULKAN_HPP_NOEXCEPT - { - maxObjectEntryCounts = maxObjectEntryCounts_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT - { - minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT - { - minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT - { - minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_; - return *this; - } - - operator VkDeviceGeneratedCommandsLimitsNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGeneratedCommandsLimitsNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount ) - && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts ) - && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment ) - && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment ) - && ( minCommandsTokenBufferOffsetAlignment == 
rhs.minCommandsTokenBufferOffsetAlignment ); - } - - bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX; - const void* pNext = {}; - uint32_t maxIndirectCommandsLayoutTokenCount = {}; - uint32_t maxObjectEntryCounts = {}; - uint32_t minSequenceCountBufferOffsetAlignment = {}; - uint32_t minSequenceIndexBufferOffsetAlignment = {}; - uint32_t minCommandsTokenBufferOffsetAlignment = {}; - }; - static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct DeviceGroupBindSparseInfo { - VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, - uint32_t memoryDeviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : resourceDeviceIndex( resourceDeviceIndex_ ) - , memoryDeviceIndex( memoryDeviceIndex_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo(uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}) VULKAN_HPP_NOEXCEPT + : resourceDeviceIndex( resourceDeviceIndex_ ), memoryDeviceIndex( memoryDeviceIndex_ ) {} - VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo ) - offsetof( DeviceGroupBindSparseInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceGroupBindSparseInfo ) ); return *this; } @@ -29536,6 +31425,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDeviceGroupBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -29546,6 +31436,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGroupBindSparseInfo const& ) const = default; +#else bool operator==( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -29558,36 +31452,54 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; const void* pNext = {}; uint32_t resourceDeviceIndex = {}; uint32_t memoryDeviceIndex = {}; + }; static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper 
have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DeviceGroupBindSparseInfo; + }; + using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; + struct DeviceGroupCommandBufferBeginInfo { - VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceMask( deviceMask_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo(uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT + : deviceMask( deviceMask_ ) {} - VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo ) - offsetof( DeviceGroupCommandBufferBeginInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceGroupCommandBufferBeginInfo ) ); return *this; } @@ -29603,6 +31515,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkDeviceGroupCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -29613,6 +31526,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGroupCommandBufferBeginInfo const& ) const = default; +#else bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -29624,4821 +31541,2767 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; const void* pNext = {}; uint32_t deviceMask = {}; + }; static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct DeviceGroupDeviceCreateInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {}, - const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ = {} ) VULKAN_HPP_NOEXCEPT - : physicalDeviceCount( physicalDeviceCount_ ) - , pPhysicalDevices( pPhysicalDevices_ ) + using Type = DeviceGroupCommandBufferBeginInfo; + }; + using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; + + class DisplayKHR + { + public: + using CType = VkDisplayKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + + public: + VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT + : m_displayKHR(VK_NULL_HANDLE) {} - VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_displayKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT + : m_displayKHR( displayKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo ) - offsetof( DeviceGroupDeviceCreateInfo, pNext ) ); + m_displayKHR = displayKHR; + return *this; + } +#endif + + DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_displayKHR = VK_NULL_HANDLE; return *this; } - DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayKHR const& ) const = default; +#else + bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR == rhs.m_displayKHR; + } + + bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR != rhs.m_displayKHR; + } + + bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR < rhs.m_displayKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayKHR m_displayKHR; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type<ObjectType::eDisplayKHR> + { + using type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR> + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + + template <> + struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR> + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + + template <> + struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayKHR> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct PerformanceConfigurationAcquireInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated) VULKAN_HPP_NOEXCEPT + : type( type_ ) + {} + + VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>(&rhs); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ); return *this; } - DeviceGroupDeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast<void*>( this ), &rhs, sizeof( PerformanceConfigurationAcquireInfoINTEL ) ); + return *this; + } + + PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT + PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT { - physicalDeviceCount = physicalDeviceCount_; + type = type_; return *this; } - DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT + + operator VkPerformanceConfigurationAcquireInfoINTEL const&() const VULKAN_HPP_NOEXCEPT { - pPhysicalDevices = pPhysicalDevices_; - return *this; + return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this ); } - operator VkDeviceGroupDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>( this ); + return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this ); } - operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>( this ); - } - bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const& ) const = default; +#else + bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( physicalDeviceCount == rhs.physicalDeviceCount ) - && ( pPhysicalDevices == 
rhs.pPhysicalDevices ); + && ( type == rhs.type ); } - bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; const void* pNext = {}; - uint32_t physicalDeviceCount = {}; - const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices = {}; + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated; + }; - static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct DeviceGroupPresentCapabilitiesKHR + template <> + struct CppType { - DeviceGroupPresentCapabilitiesKHR( std::array const& presentMask_ = {}, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT - : presentMask{} - , modes( modes_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( presentMask, presentMask_ ); - } + using Type = PerformanceConfigurationAcquireInfoINTEL; + }; - VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR ) - offsetof( DeviceGroupPresentCapabilitiesKHR, pNext ) ); - return *this; - } + class PerformanceConfigurationINTEL + { + public: + using CType = VkPerformanceConfigurationINTEL; - DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceGroupPresentCapabilitiesKHR& operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkDeviceGroupPresentCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memcmp( presentMask, rhs.presentMask, VK_MAX_DEVICE_GROUP_SIZE * sizeof( uint32_t ) ) == 0 ) - && ( modes == rhs.modes ); - } - - bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eDeviceGroupPresentCapabilitiesKHR; - const void* pNext = {}; - uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE] = {}; - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; - }; - static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DeviceGroupPresentInfoKHR - { - VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {}, - const uint32_t* pDeviceMasks_ = {}, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT - : swapchainCount( swapchainCount_ ) - , pDeviceMasks( pDeviceMasks_ ) - , mode( mode_ ) + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL(VK_NULL_HANDLE) {} - VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR ) - offsetof( DeviceGroupPresentInfoKHR, pNext ) ); + m_performanceConfigurationINTEL = performanceConfigurationINTEL; + return *this; + } +#endif + + PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = VK_NULL_HANDLE; return *this; } - DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PerformanceConfigurationINTEL const& ) const = default; +#else + bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL; + } + + bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL; + } + + bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == VK_NULL_HANDLE; + } + + private: + VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + }; + + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class QueryPool + { + public: + using CType = VkQueryPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + + public: + VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT + : m_queryPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_queryPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT + : m_queryPool( queryPool ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT + { + m_queryPool = queryPool; + return *this; + } +#endif + + QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_queryPool = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( QueryPool const& ) const = default; +#else + bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool == rhs.m_queryPool; + } + + bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool != rhs.m_queryPool; + } + + bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool < rhs.m_queryPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool == VK_NULL_HANDLE; + } + + private: + VkQueryPool m_queryPool; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::QueryPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::QueryPool; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::QueryPool; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct RenderPassBeginInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {}) VULKAN_HPP_NOEXCEPT + : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( clearValueCount_ ), pClearValues( pClearValues_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & clearValues_ ) + : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( static_cast( clearValues_.size() ) ), pClearValues( clearValues_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceGroupPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RenderPassBeginInfo ) ); + return *this; + } + + RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT - { - swapchainCount = swapchainCount_; - return *this; - } - - DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t* pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT - { - pDeviceMasks = pDeviceMasks_; - return *this; - } - - DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT - { - mode = mode_; - return *this; - } - - operator VkDeviceGroupPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchainCount == rhs.swapchainCount ) - && ( pDeviceMasks == rhs.pDeviceMasks ) - && ( mode == rhs.mode ); - } - - bool operator!=( 
DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; - const void* pNext = {}; - uint32_t swapchainCount = {}; - const uint32_t* pDeviceMasks = {}; - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal; - }; - static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DeviceGroupRenderPassBeginInfo - { - VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {}, - uint32_t deviceRenderAreaCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceMask( deviceMask_ ) - , deviceRenderAreaCount( deviceRenderAreaCount_ ) - , pDeviceRenderAreas( pDeviceRenderAreas_ ) - {} - - VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo ) - offsetof( DeviceGroupRenderPassBeginInfo, pNext ) ); - return *this; - } - - DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceGroupRenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT - { - deviceMask = deviceMask_; - return *this; - } - - DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT - { - deviceRenderAreaCount = deviceRenderAreaCount_; - return *this; - } - - DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT - { - pDeviceRenderAreas = pDeviceRenderAreas_; - return *this; - } - - operator VkDeviceGroupRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceMask == rhs.deviceMask ) - && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) - && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); - } - - bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; - const void* pNext = {}; - uint32_t deviceMask = {}; - uint32_t deviceRenderAreaCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas = {}; - }; - static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DeviceGroupSubmitInfo - { - VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {}, - const uint32_t* pWaitSemaphoreDeviceIndices_ = {}, - uint32_t commandBufferCount_ = {}, - const uint32_t* pCommandBufferDeviceMasks_ = {}, - uint32_t signalSemaphoreCount_ = {}, - const uint32_t* pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT - : waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ) - , commandBufferCount( commandBufferCount_ ) - , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ ) - {} - - VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo ) - offsetof( DeviceGroupSubmitInfo, pNext ) ); - return *this; - } - - DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceGroupSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT - { - waitSemaphoreCount = waitSemaphoreCount_; - return *this; - } - - DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT - { - pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; - return *this; - } - - DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT - { - commandBufferCount = commandBufferCount_; - return *this; - } - - DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT - { - pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; - return *this; - } - - DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT - { - signalSemaphoreCount = signalSemaphoreCount_; - return *this; - } - - DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT - { - pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; - return *this; - } - - operator VkDeviceGroupSubmitInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) - && ( commandBufferCount == rhs.commandBufferCount ) - && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) - && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) - && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); - } - - bool operator!=( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo; - const void* pNext = {}; - uint32_t waitSemaphoreCount = {}; - const uint32_t* pWaitSemaphoreDeviceIndices = {}; - uint32_t commandBufferCount = {}; - const uint32_t* pCommandBufferDeviceMasks = {}; - uint32_t signalSemaphoreCount = {}; - const uint32_t* pSignalSemaphoreDeviceIndices = {}; - }; - static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DeviceGroupSwapchainCreateInfoKHR - { - VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT - : modes( modes_ ) - {} - - VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR ) - offsetof( DeviceGroupSwapchainCreateInfoKHR, pNext ) ); - return *this; - } - - DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceGroupSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT - { - modes = modes_; - return *this; - } - - operator VkDeviceGroupSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( modes == rhs.modes ); - } - - bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; - }; - static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DeviceMemoryOpaqueCaptureAddressInfo - { - VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT - : memory( memory_ ) - {} - - VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo ) - offsetof( DeviceMemoryOpaqueCaptureAddressInfo, pNext ) ); - return *this; - } - - DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceMemoryOpaqueCaptureAddressInfo& operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT - { - memory = memory_; - return *this; - } - - operator VkDeviceMemoryOpaqueCaptureAddressInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ); - } - - bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - }; - static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DeviceMemoryOverallocationCreateInfoAMD - { - VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT - : overallocationBehavior( overallocationBehavior_ ) - {} - - VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD & operator=( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) - offsetof( DeviceMemoryOverallocationCreateInfoAMD, pNext ) ); - return *this; - } - - DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT - { - overallocationBehavior = overallocationBehavior_; - return *this; - } - - operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( overallocationBehavior == rhs.overallocationBehavior ); - } - - bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; - }; - static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DeviceQueueGlobalPriorityCreateInfoEXT - { - VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT - : globalPriority( globalPriority_ ) - {} - - VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT ) - offsetof( DeviceQueueGlobalPriorityCreateInfoEXT, pNext ) ); - return *this; - } - - DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT - { - globalPriority = globalPriority_; - return *this; - } - - operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( globalPriority == rhs.globalPriority ); - } - - bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow; - }; - static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DeviceQueueInfo2 - { - VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, - uint32_t queueFamilyIndex_ = {}, - uint32_t queueIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , queueIndex( queueIndex_ ) - {} - - VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & operator=( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 ) - offsetof( DeviceQueueInfo2, pNext ) ); - return *this; - } - - DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceQueueInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT - { - queueFamilyIndex = queueFamilyIndex_; - return *this; - } - - DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT - { - queueIndex = queueIndex_; - return *this; - } - - operator VkDeviceQueueInfo2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ) - && ( queueIndex == rhs.queueIndex ); - } - - bool operator!=( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; - uint32_t queueFamilyIndex = {}; - uint32_t queueIndex = {}; - }; - static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DispatchIndirectCommand - { - VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {}, - uint32_t y_ = {}, - uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , z( z_ ) - {} - - DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT - { - x = x_; - return *this; - } - - DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT - { - y = y_; - return *this; - } - - DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT - { - z = z_; - return *this; - } - - operator VkDispatchIndirectCommand const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( x == rhs.x ) - && ( y == rhs.y ) - && ( z == rhs.z ); - } - - bool operator!=( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - uint32_t x = {}; - uint32_t y = {}; - uint32_t z = {}; - }; - static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DisplayEventInfoEXT - { - VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT - : displayEvent( displayEvent_ ) - {} - - VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT ) - offsetof( DisplayEventInfoEXT, pNext ) ); - return *this; - } - - DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplayEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT - { - displayEvent = displayEvent_; - return *this; - } - - operator VkDisplayEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayEvent == rhs.displayEvent ); - } - - bool operator!=( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut; - }; - static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), 
"struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DisplayModeParametersKHR - { - VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, - uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT - : visibleRegion( visibleRegion_ ) - , refreshRate( refreshRate_ ) - {} - - DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ ) VULKAN_HPP_NOEXCEPT - { - visibleRegion = visibleRegion_; - return *this; - } - - DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT - { - refreshRate = refreshRate_; - return *this; - } - - operator VkDisplayModeParametersKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( visibleRegion == rhs.visibleRegion ) - && ( refreshRate == rhs.refreshRate ); - } - - bool operator!=( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {}; - uint32_t refreshRate = {}; - }; - static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayModeCreateInfoKHR - { - VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , parameters( parameters_ ) - {} - - VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) - offsetof( DisplayModeCreateInfoKHR, pNext ) ); - return *this; - } - - DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplayModeCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ ) VULKAN_HPP_NOEXCEPT - { - parameters = parameters_; - return *this; - } - - operator VkDisplayModeCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( parameters == rhs.parameters ); - } - - bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; - }; - static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayModePropertiesKHR - { - DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT - : displayMode( displayMode_ ) - , parameters( parameters_ ) - {} - - DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayModePropertiesKHR& operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkDisplayModePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( displayMode == rhs.displayMode ) - && ( parameters == rhs.parameters ); - } - - bool operator!=( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; - }; - static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DisplayModeProperties2KHR - { - DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : displayModeProperties( displayModeProperties_ ) - {} - - VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR ) - offsetof( DisplayModeProperties2KHR, pNext ) ); - return *this; - } - - DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayModeProperties2KHR& operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkDisplayModeProperties2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayModeProperties == rhs.displayModeProperties ); - } - - bool operator!=( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {}; - }; - static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayNativeHdrSurfaceCapabilitiesAMD - { - DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {} ) VULKAN_HPP_NOEXCEPT - : localDimmingSupport( localDimmingSupport_ ) - {} - - VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD ) - offsetof( DisplayNativeHdrSurfaceCapabilitiesAMD, pNext ) ); - return *this; - } - - DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayNativeHdrSurfaceCapabilitiesAMD& operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( localDimmingSupport == rhs.localDimmingSupport ); - } - - bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {}; - }; - static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayPlaneCapabilitiesKHR - { - DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT - : supportedAlpha( supportedAlpha_ ) - , minSrcPosition( minSrcPosition_ ) - , maxSrcPosition( maxSrcPosition_ ) - , minSrcExtent( minSrcExtent_ ) - , maxSrcExtent( maxSrcExtent_ ) - , minDstPosition( minDstPosition_ ) - , maxDstPosition( maxDstPosition_ ) - , minDstExtent( minDstExtent_ ) - , maxDstExtent( maxDstExtent_ ) - {} - - DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPlaneCapabilitiesKHR& operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkDisplayPlaneCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( supportedAlpha == rhs.supportedAlpha ) - && ( minSrcPosition == rhs.minSrcPosition ) - && ( maxSrcPosition == rhs.maxSrcPosition ) - && ( minSrcExtent == rhs.minSrcExtent ) - && ( maxSrcExtent == rhs.maxSrcExtent ) - && ( minDstPosition == rhs.minDstPosition ) - && ( maxDstPosition == rhs.maxDstPosition ) - && ( minDstExtent == rhs.minDstExtent ) - && ( maxDstExtent == rhs.maxDstExtent ); - } - - bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {}; - VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {}; - VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {}; - VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {}; - VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {}; - VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {}; - VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {}; - }; - static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayPlaneCapabilities2KHR - { - DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {} ) VULKAN_HPP_NOEXCEPT - : capabilities( capabilities_ ) - {} - - VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR ) - offsetof( DisplayPlaneCapabilities2KHR, pNext ) ); - return *this; - } - - DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPlaneCapabilities2KHR& operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkDisplayPlaneCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( capabilities == rhs.capabilities ); - } - - bool operator!=( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {}; - }; - static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayPlaneInfo2KHR - { - VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, - uint32_t planeIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : mode( mode_ ) - , planeIndex( planeIndex_ ) - {} - - VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR ) - offsetof( DisplayPlaneInfo2KHR, pNext ) ); - return *this; - } - - DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPlaneInfo2KHR& operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplayPlaneInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT - { - mode = mode_; - return *this; - } - - DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT - { - planeIndex = planeIndex_; - return *this; - } - - operator VkDisplayPlaneInfo2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( mode == rhs.mode ) - && ( planeIndex == rhs.planeIndex ); - } - - bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; - uint32_t planeIndex = {}; - }; - static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DisplayPlanePropertiesKHR - { - DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, - uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : currentDisplay( currentDisplay_ ) - , currentStackIndex( currentStackIndex_ ) - {} - - DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPlanePropertiesKHR& operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkDisplayPlanePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( currentDisplay == rhs.currentDisplay ) - && ( currentStackIndex == rhs.currentStackIndex ); - } - - bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {}; - uint32_t currentStackIndex = {}; - }; - static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DisplayPlaneProperties2KHR - { - DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : displayPlaneProperties( displayPlaneProperties_ ) - {} - - VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR ) - offsetof( DisplayPlaneProperties2KHR, pNext ) ); - return *this; - } - - DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPlaneProperties2KHR& operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkDisplayPlaneProperties2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayPlaneProperties == rhs.displayPlaneProperties ); - } - - bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {}; - }; - static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayPowerInfoEXT - { - VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT - : powerState( powerState_ ) - {} - - VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT ) - offsetof( DisplayPowerInfoEXT, pNext ) ); - return *this; - } - - DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplayPowerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT - { - powerState = powerState_; - return *this; - } - - operator VkDisplayPowerInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( powerState == rhs.powerState ); - } - - bool operator!=( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff; - }; - static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayPresentInfoKHR - { - VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, - VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {} ) VULKAN_HPP_NOEXCEPT - : srcRect( srcRect_ ) - , dstRect( dstRect_ ) - , persistent( persistent_ ) - {} - - VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR ) - offsetof( DisplayPresentInfoKHR, pNext ) ); - return *this; - } - - DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplayPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ ) VULKAN_HPP_NOEXCEPT - { - srcRect = srcRect_; - return *this; - } - - DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D dstRect_ ) VULKAN_HPP_NOEXCEPT - { - dstRect = dstRect_; - return *this; - } - - DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT - { - persistent = persistent_; - return *this; - } - - operator VkDisplayPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcRect == rhs.srcRect ) - && ( dstRect == rhs.dstRect ) - && ( persistent == rhs.persistent ); - } - - bool operator!=( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; - VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; - VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; - }; - static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayPropertiesKHR - { - DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, - const char* displayName_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT - : display( display_ ) - , displayName( displayName_ ) - , physicalDimensions( physicalDimensions_ ) - , physicalResolution( physicalResolution_ ) - , supportedTransforms( supportedTransforms_ ) - , planeReorderPossible( planeReorderPossible_ ) - , persistentContent( persistentContent_ ) - {} - - DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPropertiesKHR& operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkDisplayPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( display == rhs.display ) - && ( displayName == rhs.displayName ) - && ( physicalDimensions == rhs.physicalDimensions ) - && ( physicalResolution == rhs.physicalResolution ) - && ( supportedTransforms == rhs.supportedTransforms ) - && ( planeReorderPossible == rhs.planeReorderPossible ) - && ( persistentContent == rhs.persistentContent ); - } - - bool operator!=( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::DisplayKHR display = {}; - const char* displayName = {}; - VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {}; - VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; - VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {}; - VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {}; - }; - static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayProperties2KHR - { - DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : displayProperties( displayProperties_ ) - {} - - VULKAN_HPP_NAMESPACE::DisplayProperties2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR ) - offsetof( DisplayProperties2KHR, pNext ) ); - return *this; - } - - DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayProperties2KHR& operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkDisplayProperties2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayProperties == rhs.displayProperties ); - } - - bool operator!=( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; - }; - static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DisplaySurfaceCreateInfoKHR - { - VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, - uint32_t planeIndex_ = {}, - uint32_t planeStackIndex_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - float globalAlpha_ = {}, - VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, - VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , displayMode( displayMode_ ) - , planeIndex( planeIndex_ ) - , planeStackIndex( planeStackIndex_ ) - , transform( transform_ ) - , globalAlpha( globalAlpha_ ) - , alphaMode( alphaMode_ ) - , imageExtent( imageExtent_ ) - {} - - VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR ) - offsetof( DisplaySurfaceCreateInfoKHR, pNext ) ); - return *this; - } - - DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplaySurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setDisplayMode( 
VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT - { - displayMode = displayMode_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT - { - planeIndex = planeIndex_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT - { - planeStackIndex = planeStackIndex_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT - { - transform = transform_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT - { - globalAlpha = globalAlpha_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT - { - alphaMode = alphaMode_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ ) VULKAN_HPP_NOEXCEPT - { - imageExtent = imageExtent_; - return *this; - } - - operator VkDisplaySurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( displayMode == rhs.displayMode ) - && ( planeIndex == rhs.planeIndex ) - && ( planeStackIndex == rhs.planeStackIndex ) - && ( transform == rhs.transform ) - && ( globalAlpha == rhs.globalAlpha ) - && ( alphaMode == rhs.alphaMode ) - && ( imageExtent == rhs.imageExtent ); - } - - bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; - uint32_t planeIndex = {}; - uint32_t planeStackIndex = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - float globalAlpha = {}; - VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; - VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; - }; - static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  struct DrawIndexedIndirectCommand
-  {
-    VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {},
-                                                     uint32_t instanceCount_ = {},
-                                                     uint32_t firstIndex_ = {},
-                                                     int32_t vertexOffset_ = {},
-                                                     uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
-      : indexCount( indexCount_ )
-      , instanceCount( instanceCount_ )
-      , firstIndex( firstIndex_ )
-      , vertexOffset( vertexOffset_ )
-      , firstInstance( firstInstance_ )
-    {}
-
-    DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>(&rhs);
-      return *this;
-    }
-
-    DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      indexCount = indexCount_;
-      return *this;
-    }
-
-    DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      instanceCount = instanceCount_;
-      return *this;
-    }
-
-    DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
-    {
-      firstIndex = firstIndex_;
-      return *this;
-    }
-
-    DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
-    {
-      vertexOffset = vertexOffset_;
-      return *this;
-    }
-
-    DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
-    {
-      firstInstance = firstInstance_;
-      return *this;
-    }
-
-    operator VkDrawIndexedIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>( this );
-    }
-
-    operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDrawIndexedIndirectCommand*>( this );
-    }
-
-    bool operator==( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( indexCount == rhs.indexCount )
-          && ( instanceCount == rhs.instanceCount )
-          && ( firstIndex == rhs.firstIndex )
-          && ( vertexOffset == rhs.vertexOffset )
-          && ( firstInstance == rhs.firstInstance );
-    }
-
-    bool operator!=( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    uint32_t indexCount = {};
-    uint32_t instanceCount = {};
-    uint32_t firstIndex = {};
-    int32_t vertexOffset = {};
-    uint32_t firstInstance = {};
-  };
-  static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
-
-  struct DrawIndirectCommand
-  {
-    VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {},
-                                              uint32_t instanceCount_ = {},
-                                              uint32_t firstVertex_ = {},
-                                              uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
-      : vertexCount( vertexCount_ )
-      , instanceCount( instanceCount_ )
-      , firstVertex( firstVertex_ )
-      , firstInstance( firstInstance_ )
-    {}
-
-    DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>(&rhs);
-      return *this;
-    }
-
-    DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      vertexCount = vertexCount_;
-      return *this;
-    }
-
-    DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      instanceCount = instanceCount_;
-      return *this;
-    }
-
-    DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
-    {
-      firstVertex = firstVertex_;
-      return *this;
-    }
-
-    DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
-    {
-      firstInstance = firstInstance_;
-      return *this;
-    }
-
-    operator VkDrawIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDrawIndirectCommand*>( this );
-    }
-
-    operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDrawIndirectCommand*>( this );
-    }
-
-    bool operator==( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( vertexCount == rhs.vertexCount )
-          && ( instanceCount == rhs.instanceCount )
-          && ( firstVertex == rhs.firstVertex )
-          && ( firstInstance == rhs.firstInstance );
-    }
-
-    bool operator!=( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    uint32_t vertexCount = {};
-    uint32_t instanceCount = {};
-    uint32_t firstVertex = {};
-    uint32_t firstInstance = {};
-  };
-  static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DrawIndirectCommand>::value, "struct wrapper is not a standard layout!"
); - - struct DrawMeshTasksIndirectCommandNV - { - VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {}, - uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT - : taskCount( taskCount_ ) - , firstTask( firstTask_ ) - {} - - DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DrawMeshTasksIndirectCommandNV& operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT - { - taskCount = taskCount_; - return *this; - } - - DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT - { - firstTask = firstTask_; - return *this; - } - - operator VkDrawMeshTasksIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( taskCount == rhs.taskCount ) - && ( firstTask == rhs.firstTask ); - } - - bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - uint32_t taskCount = {}; - uint32_t firstTask = {}; - }; - static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DrmFormatModifierPropertiesEXT - { - DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, - uint32_t drmFormatModifierPlaneCount_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifier( drmFormatModifier_ ) - , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) - , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ ) - {} - - DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DrmFormatModifierPropertiesEXT& operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( drmFormatModifier == rhs.drmFormatModifier ) - && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) - && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); - } - - bool operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - uint64_t drmFormatModifier = {}; - uint32_t drmFormatModifierPlaneCount = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {}; - }; - static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DrmFormatModifierPropertiesListEXT - { - DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {}, - VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifierCount( drmFormatModifierCount_ ) - , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) - {} - - VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT & operator=( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT ) - offsetof( DrmFormatModifierPropertiesListEXT, pNext ) ); - return *this; - } - - DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DrmFormatModifierPropertiesListEXT& operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkDrmFormatModifierPropertiesListEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) - && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); - } - - bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; - void* pNext = {}; - uint32_t drmFormatModifierCount = {}; - VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties = {}; - }; - static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  struct EventCreateInfo
-  {
-    VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
-      : flags( flags_ )
-    {}
-
-    VULKAN_HPP_NAMESPACE::EventCreateInfo & operator=( VULKAN_HPP_NAMESPACE::EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) - offsetof( EventCreateInfo, pNext ) );
-      return *this;
-    }
-
-    EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    EventCreateInfo& operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>(&rhs);
-      return *this;
-    }
-
-    EventCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    operator VkEventCreateInfo const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkEventCreateInfo*>( this );
-    }
-
-    operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkEventCreateInfo*>( this );
-    }
-
-    bool operator==( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags );
-    }
-
-    bool operator!=( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {};
-  };
-  static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<EventCreateInfo>::value, "struct wrapper is not a standard layout!"
); - - struct ExportFenceCreateInfo - { - VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo ) - offsetof( ExportFenceCreateInfo, pNext ) ); - return *this; - } - - ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportFenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExportFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {}; - }; - static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - struct ExportFenceWin32HandleInfoKHR - { - VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {}, - DWORD dwAccess_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT - : pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) - {} - - VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR ) - offsetof( ExportFenceWin32HandleInfoKHR, pNext ) ); - return *this; - } - - ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT - { - pAttributes = pAttributes_; - return *this; - } - - ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT - { - dwAccess = dwAccess_; - return *this; - } - - ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT - { - name = name_; - return *this; - } - - operator VkExportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ) - && ( name == rhs.name ); - } - - bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; - const void* pNext = {}; - const SECURITY_ATTRIBUTES* pAttributes = {}; - DWORD dwAccess = {}; - LPCWSTR name = {}; - }; - static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct ExportMemoryAllocateInfo - { - VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo ) - offsetof( ExportMemoryAllocateInfo, pNext ) ); - return *this; - } - - ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportMemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExportMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; - }; - static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ExportMemoryAllocateInfoNV - { - VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV ) - offsetof( ExportMemoryAllocateInfoNV, pNext ) ); - return *this; - } - - ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExportMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; - }; - static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - struct ExportMemoryWin32HandleInfoKHR - { - VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {}, - DWORD dwAccess_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT - : pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) - {} - - VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR ) - offsetof( ExportMemoryWin32HandleInfoKHR, pNext ) ); - return *this; - } - - ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT - { - pAttributes = pAttributes_; - return *this; - } - - ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT - { - dwAccess = dwAccess_; - return *this; - } - - ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT - { - name = name_; - return *this; - } - - operator VkExportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ) - && ( name == rhs.name ); - } - - bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; - const void* pNext = {}; - const SECURITY_ATTRIBUTES* pAttributes = {}; - DWORD dwAccess = {}; - LPCWSTR name = {}; - }; - static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - struct ExportMemoryWin32HandleInfoNV - { - VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = {}, - DWORD dwAccess_ = {} ) VULKAN_HPP_NOEXCEPT - : pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - {} - - VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV ) - offsetof( ExportMemoryWin32HandleInfoNV, pNext ) ); - return *this; - } - - ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT - { - pAttributes = pAttributes_; - return *this; - } - - ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT - { - dwAccess = dwAccess_; - return *this; - } - - operator VkExportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ); - } - - bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; - const void* pNext = {}; - const SECURITY_ATTRIBUTES* pAttributes = {}; - DWORD dwAccess = {}; - }; - static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct ExportSemaphoreCreateInfo - { - VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo ) - offsetof( ExportSemaphoreCreateInfo, pNext ) ); - return *this; - } - - ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportSemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExportSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {}; - }; - static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - struct ExportSemaphoreWin32HandleInfoKHR - { - VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {}, - DWORD dwAccess_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT - : pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) - {} - - VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR ) - offsetof( ExportSemaphoreWin32HandleInfoKHR, pNext ) ); - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT - { - pAttributes = pAttributes_; - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT - { - dwAccess = dwAccess_; - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT - { - name = name_; - return *this; - } - - operator VkExportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ) - && ( name == rhs.name ); - } - - bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; - const void* pNext = {}; - const SECURITY_ATTRIBUTES* pAttributes = {}; - DWORD dwAccess = {}; - LPCWSTR name = {}; - }; - static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct ExtensionProperties - { - ExtensionProperties( std::array const& extensionName_ = {}, - uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT - : extensionName{} - , specVersion( specVersion_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( extensionName, extensionName_ ); - } - - ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExtensionProperties& operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkExtensionProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 ) - && ( specVersion == rhs.specVersion ); - } - - bool operator!=( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - char extensionName[VK_MAX_EXTENSION_NAME_SIZE] = {}; - uint32_t specVersion = {}; - }; - static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ExternalMemoryProperties - { - ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : externalMemoryFeatures( externalMemoryFeatures_ ) - , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - {} - - ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalMemoryProperties& operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkExternalMemoryProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); - } - - bool operator!=( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; - }; - static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ExternalBufferProperties - { - ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : externalMemoryProperties( externalMemoryProperties_ ) - {} - - VULKAN_HPP_NAMESPACE::ExternalBufferProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalBufferProperties ) - offsetof( ExternalBufferProperties, pNext ) ); - return *this; - } - - ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalBufferProperties& operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkExternalBufferProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( externalMemoryProperties == rhs.externalMemoryProperties ); - } - - bool operator!=( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; - }; - static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ExternalFenceProperties - { - ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - , externalFenceFeatures( externalFenceFeatures_ ) - {} - - VULKAN_HPP_NAMESPACE::ExternalFenceProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalFenceProperties ) - offsetof( ExternalFenceProperties, pNext ) ); - return *this; - } - - ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalFenceProperties& operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkExternalFenceProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) - && ( externalFenceFeatures == rhs.externalFenceFeatures ); - } - - bool operator!=( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {}; - }; - static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - - struct ExternalFormatANDROID - { - VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {} ) VULKAN_HPP_NOEXCEPT - : externalFormat( externalFormat_ ) - {} - - VULKAN_HPP_NAMESPACE::ExternalFormatANDROID & operator=( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID ) - offsetof( ExternalFormatANDROID, pNext ) ); - return *this; - } - - ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExternalFormatANDROID & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT - { - externalFormat = externalFormat_; - return *this; - } - - operator VkExternalFormatANDROID const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( externalFormat == rhs.externalFormat ); - } - - bool operator!=( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID; - void* pNext = {}; - uint64_t externalFormat = {}; - }; - static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - struct ExternalImageFormatProperties - { - ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : externalMemoryProperties( externalMemoryProperties_ ) - {} - - VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties ) - offsetof( ExternalImageFormatProperties, pNext ) ); - return *this; - } - - ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalImageFormatProperties& operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkExternalImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( externalMemoryProperties == rhs.externalMemoryProperties ); - } - - bool operator!=( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; - }; - static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageFormatProperties - { - ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, - uint32_t maxMipLevels_ = {}, - uint32_t maxArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT - : maxExtent( maxExtent_ ) - , maxMipLevels( maxMipLevels_ ) - , maxArrayLayers( maxArrayLayers_ ) - , sampleCounts( sampleCounts_ ) - , maxResourceSize( maxResourceSize_ ) - {} - - ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageFormatProperties& operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( maxExtent == rhs.maxExtent ) - && ( maxMipLevels == rhs.maxMipLevels ) - && ( maxArrayLayers == rhs.maxArrayLayers ) - && ( sampleCounts == rhs.sampleCounts ) - && ( maxResourceSize == rhs.maxResourceSize ); - } - - bool operator!=( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {}; - uint32_t maxMipLevels = {}; - uint32_t maxArrayLayers = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {}; - }; - static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ExternalImageFormatPropertiesNV - { - ExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : imageFormatProperties( imageFormatProperties_ ) - , externalMemoryFeatures( externalMemoryFeatures_ ) - , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - {} - - ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalImageFormatPropertiesNV& operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkExternalImageFormatPropertiesNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( imageFormatProperties == rhs.imageFormatProperties ) - && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); - } - - bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {}; - }; - static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ExternalMemoryBufferCreateInfo - { - VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo ) - offsetof( ExternalMemoryBufferCreateInfo, pNext ) ); - return *this; - } - - ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExternalMemoryBufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExternalMemoryBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; - }; - static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ExternalMemoryImageCreateInfo - { - VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo ) - offsetof( ExternalMemoryImageCreateInfo, pNext ) ); - return *this; - } - - ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExternalMemoryImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExternalMemoryImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; - }; - static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ExternalMemoryImageCreateInfoNV - { - VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV ) - offsetof( ExternalMemoryImageCreateInfoNV, pNext ) ); - return *this; - } - - ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExternalMemoryImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExternalMemoryImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; - }; - static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ExternalSemaphoreProperties - { - ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - , externalSemaphoreFeatures( externalSemaphoreFeatures_ ) - {} - - VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties ) - offsetof( ExternalSemaphoreProperties, pNext ) ); - return *this; - } - - ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalSemaphoreProperties& operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkExternalSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) - && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); - } - - bool operator!=( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; - }; - static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
 );
-
-  struct FenceCreateInfo
-  {
-    VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
-      : flags( flags_ )
-    {}
-
-    VULKAN_HPP_NAMESPACE::FenceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) - offsetof( FenceCreateInfo, pNext ) );
-      return *this;
-    }
-
-    FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>(&rhs);
-      return *this;
-    }
-
-    FenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    operator VkFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkFenceCreateInfo*>( this );
-    }
-
-    operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkFenceCreateInfo*>( this );
-    }
-
-    bool operator==( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags );
-    }
-
-    bool operator!=( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
-  };
-  static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<FenceCreateInfo>::value, "struct wrapper is not a standard layout!"
); - - struct FenceGetFdInfoKHR - { - VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : fence( fence_ ) - , handleType( handleType_ ) - {} - - VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR ) - offsetof( FenceGetFdInfoKHR, pNext ) ); - return *this; - } - - FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - FenceGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT - { - fence = fence_; - return *this; - } - - FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - operator VkFenceGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; - }; - static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - struct FenceGetWin32HandleInfoKHR - { - VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : fence( fence_ ) - , handleType( handleType_ ) - {} - - VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR ) - offsetof( FenceGetWin32HandleInfoKHR, pNext ) ); - return *this; - } - - FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - FenceGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT - { - fence = fence_; - return *this; - } - - FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - operator VkFenceGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; - }; - static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct FilterCubicImageViewImageFormatPropertiesEXT - { - FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {} ) VULKAN_HPP_NOEXCEPT - : filterCubic( filterCubic_ ) - , filterCubicMinmax( filterCubicMinmax_ ) - {} - - VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT ) - offsetof( FilterCubicImageViewImageFormatPropertiesEXT, pNext ) ); - return *this; - } - - FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FilterCubicImageViewImageFormatPropertiesEXT& operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkFilterCubicImageViewImageFormatPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( filterCubic == rhs.filterCubic ) - && ( filterCubicMinmax == rhs.filterCubicMinmax ); - } - - bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {}; - }; - static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct FormatProperties - { - FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : linearTilingFeatures( linearTilingFeatures_ ) - , optimalTilingFeatures( optimalTilingFeatures_ ) - , bufferFeatures( bufferFeatures_ ) - {} - - FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FormatProperties& operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkFormatProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( linearTilingFeatures == rhs.linearTilingFeatures ) - && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) - && ( bufferFeatures == rhs.bufferFeatures ); - } - - bool operator!=( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {}; - }; - static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct FormatProperties2 - { - FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : formatProperties( formatProperties_ ) - {} - - VULKAN_HPP_NAMESPACE::FormatProperties2 & operator=( VULKAN_HPP_NAMESPACE::FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FormatProperties2 ) - offsetof( FormatProperties2, pNext ) ); - return *this; - } - - FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FormatProperties2& operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( formatProperties == rhs.formatProperties ); - } - - bool operator!=( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {}; - }; - static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct FramebufferAttachmentImageInfo - { - VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - uint32_t width_ = {}, - uint32_t height_ = {}, - uint32_t layerCount_ = {}, - uint32_t viewFormatCount_ = {}, - const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , usage( usage_ ) - , width( width_ ) - , height( height_ ) - , layerCount( layerCount_ ) - , viewFormatCount( viewFormatCount_ ) - , pViewFormats( pViewFormats_ ) - {} - - VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo & operator=( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo ) - offsetof( FramebufferAttachmentImageInfo, pNext ) ); - return *this; - } - - FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FramebufferAttachmentImageInfo& operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - FramebufferAttachmentImageInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT - { - usage = usage_; - return *this; - } - - FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT - { - width = width_; - return *this; - } - - FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT - { - height = height_; - return *this; - } - - FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT - { - layerCount = layerCount_; - return *this; - } - - FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT - { - viewFormatCount = viewFormatCount_; - return *this; - } - - FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT - { - pViewFormats = pViewFormats_; - return *this; - } - - operator VkFramebufferAttachmentImageInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( usage == rhs.usage ) - && ( width == rhs.width ) - && ( height == rhs.height ) - && ( layerCount == rhs.layerCount ) - && ( viewFormatCount == rhs.viewFormatCount ) - && ( pViewFormats == rhs.pViewFormats ); - } - - bool operator!=( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - uint32_t width = {}; - uint32_t height = {}; - uint32_t layerCount = {}; - uint32_t viewFormatCount = {}; - const 
VULKAN_HPP_NAMESPACE::Format* pViewFormats = {}; - }; - static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct FramebufferAttachmentsCreateInfo - { - VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {}, - const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ = {} ) VULKAN_HPP_NOEXCEPT - : attachmentImageInfoCount( attachmentImageInfoCount_ ) - , pAttachmentImageInfos( pAttachmentImageInfos_ ) - {} - - VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo & operator=( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo ) - offsetof( FramebufferAttachmentsCreateInfo, pNext ) ); - return *this; - } - - FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FramebufferAttachmentsCreateInfo& operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - FramebufferAttachmentsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT - { - attachmentImageInfoCount = attachmentImageInfoCount_; - return *this; - } - - FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT - { - pAttachmentImageInfos = pAttachmentImageInfos_; - return *this; - } - - operator VkFramebufferAttachmentsCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) - && ( pAttachmentImageInfos == rhs.pAttachmentImageInfos ); - } - - bool operator!=( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo; - const void* pNext = {}; - uint32_t attachmentImageInfoCount = {}; - const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos = {}; - }; - static_assert( sizeof( FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct FramebufferCreateInfo - { - VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {}, - uint32_t width_ = {}, - uint32_t height_ = {}, - uint32_t layers_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , renderPass( renderPass_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , width( width_ ) - , height( height_ ) - , layers( layers_ ) - {} - - VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & operator=( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) - offsetof( FramebufferCreateInfo, pNext ) ); - return *this; - } - - FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - FramebufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT { renderPass = renderPass_; return *this; } - FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT { - attachmentCount = attachmentCount_; + framebuffer = framebuffer_; return *this; } - FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT + RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT { - pAttachments = pAttachments_; + renderArea = renderArea_; return *this; } - FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT { - width = width_; + clearValueCount = clearValueCount_; return *this; } - FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT { - height = height_; + pClearValues = pClearValues_; return *this; } - FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassBeginInfo & setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & clearValues_ ) VULKAN_HPP_NOEXCEPT { - layers = layers_; + clearValueCount = static_cast( clearValues_.size() ); + pClearValues = clearValues_.data(); return *this; } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) - operator VkFramebufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT + + operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT { - return 
*reinterpret_cast( this ); + return *reinterpret_cast( this ); } - bool operator==( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RenderPassBeginInfo const& ) const = default; +#else + bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) && ( renderPass == rhs.renderPass ) - && ( attachmentCount == rhs.attachmentCount ) - && ( pAttachments == rhs.pAttachments ) - && ( width == rhs.width ) - && ( height == rhs.height ) - && ( layers == rhs.layers ); + && ( framebuffer == rhs.framebuffer ) + && ( renderArea == rhs.renderArea ) + && ( clearValueCount == rhs.clearValueCount ) + && ( pClearValues == rhs.pClearValues ); } - bool operator!=( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {}; - uint32_t width = {}; - uint32_t height = {}; - uint32_t layers = {}; - }; - static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + uint32_t clearValueCount = {}; + const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {}; - struct FramebufferMixedSamplesCombinationNV + }; + static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
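A brief usage sketch of the RenderPassBeginInfo wrapper introduced above, including the new enhanced-mode setClearValues() overload that fills clearValueCount and pClearValues together. It is illustrative only (not part of the diff); the function name recordPass and the handles passed in are assumptions.

    #include <array>
    #include <vulkan/vulkan.hpp>

    void recordPass( vk::CommandBuffer cmd, vk::RenderPass renderPass,
                     vk::Framebuffer framebuffer, vk::Extent2D extent )
    {
      // Named (non-temporary) array: setClearValues() takes ArrayProxyNoTemporaries,
      // so an rvalue container would be rejected at compile time.
      std::array<vk::ClearValue, 2> clearValues = {
        vk::ClearValue( vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) ),
        vk::ClearValue( vk::ClearDepthStencilValue( 1.0f, 0 ) )
      };

      vk::RenderPassBeginInfo beginInfo;
      beginInfo.setRenderPass( renderPass )
               .setFramebuffer( framebuffer )
               .setRenderArea( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) )
               .setClearValues( clearValues );  // fills clearValueCount and pClearValues together

      cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
      // ... draw calls ...
      cmd.endRenderPass();
    }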
); + + template <> + struct CppType { - FramebufferMixedSamplesCombinationNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {} ) VULKAN_HPP_NOEXCEPT - : coverageReductionMode( coverageReductionMode_ ) - , rasterizationSamples( rasterizationSamples_ ) - , depthStencilSamples( depthStencilSamples_ ) - , colorSamples( colorSamples_ ) + using Type = RenderPassBeginInfo; + }; + + struct SubpassBeginInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassBeginInfo(VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline) VULKAN_HPP_NOEXCEPT + : contents( contents_ ) {} - VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV & operator=( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV ) - offsetof( FramebufferMixedSamplesCombinationNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - FramebufferMixedSamplesCombinationNV& operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkFramebufferMixedSamplesCombinationNV const&() const VULKAN_HPP_NOEXCEPT + SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + memcpy( static_cast( this ), &rhs, sizeof( SubpassBeginInfo ) ); + return *this; } - operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT + SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - bool operator==( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT + SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT + { + contents = contents_; + return *this; + } + + + operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubpassBeginInfo const& ) const = default; +#else + bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( coverageReductionMode == rhs.coverageReductionMode ) - && ( rasterizationSamples == rhs.rasterizationSamples ) - && ( depthStencilSamples == rhs.depthStencilSamples ) - && ( colorSamples == 
rhs.colorSamples ); + && ( contents == rhs.contents ); } - bool operator!=( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {}; - }; - static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline; - struct VertexInputBindingDescription + }; + static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( uint32_t binding_ = {}, - uint32_t stride_ = {}, - VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT - : binding( binding_ ) - , stride( stride_ ) - , inputRate( inputRate_ ) + using Type = SubpassBeginInfo; + }; + using SubpassBeginInfoKHR = SubpassBeginInfo; + + struct ImageBlit + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array const& srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array const& dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ ) {} - VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT { - binding = binding_; + memcpy( static_cast( this ), &rhs, sizeof( ImageBlit ) ); return *this; } - VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT { - stride = stride_; + srcSubresource = srcSubresource_; return *this; } - 
VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT + ImageBlit & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT { - inputRate = inputRate_; + srcOffsets = srcOffsets_; return *this; } - operator VkVertexInputBindingDescription const&() const VULKAN_HPP_NOEXCEPT + ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstSubresource = dstSubresource_; + return *this; } - operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT + ImageBlit & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstOffsets = dstOffsets_; + return *this; } - bool operator==( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + + operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT { - return ( binding == rhs.binding ) - && ( stride == rhs.stride ) - && ( inputRate == rhs.inputRate ); + return *reinterpret_cast( this ); } - bool operator!=( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + operator VkImageBlit &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageBlit const& ) const = default; +#else + bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( srcSubresource == rhs.srcSubresource ) + && ( srcOffsets == rhs.srcOffsets ) + && ( dstSubresource == rhs.dstSubresource ) + && ( dstOffsets == rhs.dstOffsets ); + } + + bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - uint32_t binding = {}; - uint32_t stride = {}; - VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; - }; - static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; - struct VertexInputAttributeDescription + }; + static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
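An illustrative sketch (not part of the diff) of the reworked ImageBlit wrapper above, whose offset members are now ArrayWrapper1D and whose setters take a std::array of two Offset3D values. The mip-blit helper, its name, and the assumption that level >= 1 are illustrative only.

    #include <array>
    #include <vulkan/vulkan.hpp>

    // Build a blit from mip level (level - 1) into level, halving the extent.
    vk::ImageBlit makeMipBlit( uint32_t level, int32_t srcWidth, int32_t srcHeight )
    {
      vk::ImageSubresourceLayers src( vk::ImageAspectFlagBits::eColor, level - 1, 0, 1 );
      vk::ImageSubresourceLayers dst( vk::ImageAspectFlagBits::eColor, level, 0, 1 );

      std::array<vk::Offset3D, 2> srcOffsets = { vk::Offset3D( 0, 0, 0 ),
                                                 vk::Offset3D( srcWidth, srcHeight, 1 ) };
      std::array<vk::Offset3D, 2> dstOffsets = { vk::Offset3D( 0, 0, 0 ),
                                                 vk::Offset3D( srcWidth > 1 ? srcWidth / 2 : 1,
                                                               srcHeight > 1 ? srcHeight / 2 : 1, 1 ) };

      return vk::ImageBlit()
          .setSrcSubresource( src )
          .setSrcOffsets( srcOffsets )   // copied by value into the ArrayWrapper1D member
          .setDstSubresource( dst )
          .setDstOffsets( dstOffsets );
    }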
); + + struct ImageSubresourceRange { - VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = {}, - uint32_t binding_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT - : location( location_ ) - , binding( binding_ ) - , format( format_ ) - , offset( offset_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresourceRange(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ), baseMipLevel( baseMipLevel_ ), levelCount( levelCount_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ ) {} - VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT + ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT { - location = location_; + memcpy( static_cast( this ), &rhs, sizeof( ImageSubresourceRange ) ); return *this; } - VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT { - binding = binding_; + aspectMask = aspectMask_; return *this; } - VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT { - format = format_; + baseMipLevel = baseMipLevel_; return *this; } - VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT + { + levelCount = levelCount_; + return *this; + } + + ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + + operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageSubresourceRange const& ) const = default; +#else + bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspectMask == rhs.aspectMask ) + && ( baseMipLevel == rhs.baseMipLevel ) + && ( levelCount == rhs.levelCount ) + && ( baseArrayLayer == rhs.baseArrayLayer ) + && ( layerCount == rhs.layerCount ); + } + + bool operator!=( ImageSubresourceRange const& rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t baseMipLevel = {}; + uint32_t levelCount = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + + }; + static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ImageCopy + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageCopy ) ); + return *this; + } + + ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + + operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageCopy const& ) const = default; +#else + bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( srcSubresource == rhs.srcSubresource ) + && ( srcOffset == rhs.srcOffset ) + && ( dstSubresource == rhs.dstSubresource ) + && ( dstOffset == rhs.dstOffset ) + && ( extent == rhs.extent ); + } + + bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + + }; + static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" 
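An illustrative sketch (not part of the diff) of the ImageSubresourceRange wrapper above, built with the chained setters; the helper name is an assumption, while VK_REMAINING_MIP_LEVELS and VK_REMAINING_ARRAY_LAYERS are the standard Vulkan constants.

    #include <vulkan/vulkan.hpp>

    // Whole-image colour range, as typically passed to an image memory barrier.
    vk::ImageSubresourceRange fullColorRange()
    {
      return vk::ImageSubresourceRange()
          .setAspectMask( vk::ImageAspectFlagBits::eColor )
          .setBaseMipLevel( 0 )
          .setLevelCount( VK_REMAINING_MIP_LEVELS )
          .setBaseArrayLayer( 0 )
          .setLayerCount( VK_REMAINING_ARRAY_LAYERS );
    }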
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct SubpassEndInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT + + {} + + VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SubpassEndInfo ) ); + return *this; + } + + SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + + operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubpassEndInfo const& ) const = default; +#else + bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ); + } + + bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo; + const void* pNext = {}; + + }; + static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SubpassEndInfo; + }; + using SubpassEndInfoKHR = SubpassEndInfo; + + class IndirectCommandsLayoutNV + { + public: + using CType = VkIndirectCommandsLayoutNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + IndirectCommandsLayoutNV & operator=(VkIndirectCommandsLayoutNV indirectCommandsLayoutNV) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = indirectCommandsLayoutNV; + return *this; + } +#endif + + IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( IndirectCommandsLayoutNV const& ) const = default; +#else + bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV; + } + + bool operator!=(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV; + } + + bool operator<(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV == VK_NULL_HANDLE; + } + + private: + VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + }; + + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct IndirectCommandsStreamNV + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ), offset( offset_ ) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( IndirectCommandsStreamNV ) ); + return *this; + } + + IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } - operator VkVertexInputAttributeDescription const&() const VULKAN_HPP_NOEXCEPT + + operator VkIndirectCommandsStreamNV const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - bool operator==( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( IndirectCommandsStreamNV const& ) const = default; +#else + bool operator==( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT { - return ( location == rhs.location ) - && ( binding == rhs.binding ) - && ( format == rhs.format ) + return ( buffer == rhs.buffer ) && ( offset == rhs.offset ); } - bool operator!=( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - uint32_t location = {}; - uint32_t binding = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - uint32_t offset = {}; - }; - static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - struct PipelineVertexInputStateCreateInfo + }; + static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
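An illustrative sketch (not part of the diff) of the new VK_NV_device_generated_commands wrappers above: IndirectCommandsLayoutNV behaves like any other vk:: handle (default-constructed to VK_NULL_HANDLE and null-testable), and IndirectCommandsStreamNV is a plain struct built with setters. The function name and the buffer handle are assumptions.

    #include <vulkan/vulkan.hpp>

    void describeStream( vk::Buffer tokenBuffer, vk::DeviceSize offset )
    {
      vk::IndirectCommandsLayoutNV layout;   // default-constructed == VK_NULL_HANDLE
      if ( !layout )
      {
        // layout has not been created yet
      }

      vk::IndirectCommandsStreamNV stream = vk::IndirectCommandsStreamNV()
          .setBuffer( tokenBuffer )
          .setOffset( offset );
      (void)stream;
    }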
); + + struct GeneratedCommandsInfoNV { - VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, - uint32_t vertexBindingDescriptionCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ = {}, - uint32_t vertexAttributeDescriptionCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ) - , pVertexBindingDescriptions( pVertexBindingDescriptions_ ) - , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ) - , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ = {}, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}) VULKAN_HPP_NOEXCEPT + : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( streamCount_ ), pStreams( pStreams_ ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ ) {} - VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo ) - offsetof( PipelineVertexInputStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::Pipeline pipeline_, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streams_, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize 
preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) + : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( static_cast( streams_.size() ) ), pStreams( streams_.data() ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineVertexInputStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( GeneratedCommandsInfoNV ) ); + return *this; + } + + GeneratedCommandsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + pipelineBindPoint = pipelineBindPoint_; return *this; } - PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT { - vertexBindingDescriptionCount = vertexBindingDescriptionCount_; + pipeline = pipeline_; return *this; } - PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT { - pVertexBindingDescriptions = pVertexBindingDescriptions_; + indirectCommandsLayout = indirectCommandsLayout_; return *this; } - PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT { - vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; + streamCount = streamCount_; return *this; } - PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ ) VULKAN_HPP_NOEXCEPT { - pVertexAttributeDescriptions = pVertexAttributeDescriptions_; + pStreams = pStreams_; return *this; } - operator VkPipelineVertexInputStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT +#if 
!defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + GeneratedCommandsInfoNV & setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streams_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + streamCount = static_cast( streams_.size() ); + pStreams = streams_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCount = sequencesCount_; + return *this; } - operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + preprocessBuffer = preprocessBuffer_; + return *this; } - bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT + { + preprocessOffset = preprocessOffset_; + return *this; + } + + GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT + { + preprocessSize = preprocessSize_; + return *this; + } + + GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountBuffer = sequencesCountBuffer_; + return *this; + } + + GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountOffset = sequencesCountOffset_; + return *this; + } + + GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexBuffer = sequencesIndexBuffer_; + return *this; + } + + GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexOffset = sequencesIndexOffset_; + return *this; + } + + + operator VkGeneratedCommandsInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( GeneratedCommandsInfoNV const& ) const = default; +#else + bool operator==( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) - && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) - && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) - && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( pipeline == rhs.pipeline ) + && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) + && ( streamCount == rhs.streamCount ) + && ( pStreams == rhs.pStreams ) + && ( sequencesCount == rhs.sequencesCount ) + && ( preprocessBuffer == rhs.preprocessBuffer ) + && ( preprocessOffset == rhs.preprocessOffset ) + && ( preprocessSize == rhs.preprocessSize ) + && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) + && ( sequencesCountOffset == rhs.sequencesCountOffset ) + && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) + && ( sequencesIndexOffset == rhs.sequencesIndexOffset ); } - bool 
operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; - uint32_t vertexBindingDescriptionCount = {}; - const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions = {}; - uint32_t vertexAttributeDescriptionCount = {}; - const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions = {}; - }; - static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t streamCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams = {}; + uint32_t sequencesCount = {}; + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; - struct PipelineInputAssemblyStateCreateInfo + }; + static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
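Reviewer note, not part of the generated header: a minimal sketch of how the GeneratedCommandsInfoNV wrapper added above is typically filled in and handed to the VK_NV_device_generated_commands command (assuming the default vk namespace and handles created elsewhere).

#include <vulkan/vulkan.hpp>

void executeGenerated( vk::CommandBuffer cmd, vk::Pipeline pipeline,
                       vk::IndirectCommandsLayoutNV layout,
                       const vk::IndirectCommandsStreamNV & stream,
                       vk::Buffer preprocessBuffer, vk::DeviceSize preprocessSize,
                       uint32_t sequencesCount )
{
  vk::GeneratedCommandsInfoNV info;
  info.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
      .setPipeline( pipeline )
      .setIndirectCommandsLayout( layout )
      .setStreamCount( 1 )
      .setPStreams( &stream )
      .setSequencesCount( sequencesCount )
      .setPreprocessBuffer( preprocessBuffer )
      .setPreprocessSize( preprocessSize );
  // isPreprocessed = VK_FALSE: preprocessing happens as part of execution.
  cmd.executeGeneratedCommandsNV( VK_FALSE, info );
}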
); + + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, - VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , topology( topology_ ) - , primitiveRestartEnable( primitiveRestartEnable_ ) + using Type = GeneratedCommandsInfoNV; + }; + + struct MemoryBarrier + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}) VULKAN_HPP_NOEXCEPT + : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ) {} - VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo ) - offsetof( PipelineInputAssemblyStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineInputAssemblyStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryBarrier ) ); + return *this; + } + + MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + srcAccessMask = srcAccessMask_; return *this; } - PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT + MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT { - topology = topology_; + dstAccessMask = dstAccessMask_; return *this; } - PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT + + operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT { - primitiveRestartEnable = primitiveRestartEnable_; - return *this; + return *reinterpret_cast( this ); } - operator VkPipelineInputAssemblyStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT 
- { - return *reinterpret_cast( this ); - } - bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryBarrier const& ) const = default; +#else + bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( topology == rhs.topology ) - && ( primitiveRestartEnable == rhs.primitiveRestartEnable ); + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ); } - bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList; - VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; - }; - static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - struct PipelineTessellationStateCreateInfo + }; + static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
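As an illustrative aside (not part of the patch): the MemoryBarrier wrapper defined above is usually passed straight to CommandBuffer::pipelineBarrier in enhanced mode; a minimal sketch, assuming a command buffer in the recording state.

#include <vulkan/vulkan.hpp>

void globalTransferToShaderBarrier( vk::CommandBuffer cmd )
{
  vk::MemoryBarrier barrier;
  barrier.setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
         .setDstAccessMask( vk::AccessFlagBits::eShaderRead );
  // Single global barrier; no buffer or image barriers in this sketch.
  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                       vk::PipelineStageFlagBits::eFragmentShader,
                       {}, barrier, {}, {} );
}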
); + + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, - uint32_t patchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , patchControlPoints( patchControlPoints_ ) + using Type = MemoryBarrier; + }; + + struct ImageMemoryBarrier + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT + : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ ) {} - VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo ) - offsetof( PipelineTessellationStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineTessellationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageMemoryBarrier ) ); + return *this; + } + + ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + srcAccessMask = srcAccessMask_; return *this; } - PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT + ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT { - patchControlPoints = patchControlPoints_; + dstAccessMask = dstAccessMask_; return *this; } - operator VkPipelineTessellationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + ImageMemoryBarrier & setOldLayout( 
VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + oldLayout = oldLayout_; + return *this; } - operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT + ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + newLayout = newLayout_; + return *this; } - bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } + + + operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageMemoryBarrier const& ) const = default; +#else + bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( patchControlPoints == rhs.patchControlPoints ); + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ) + && ( oldLayout == rhs.oldLayout ) + && ( newLayout == rhs.newLayout ) + && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) + && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) + && ( image == rhs.image ) + && ( subresourceRange == rhs.subresourceRange ); } - bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; - uint32_t patchControlPoints = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + + }; + static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
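Illustrative only, not generated code: the ImageMemoryBarrier wrapper above is what image layout transitions are built from; a small sketch assuming a single-mip, single-layer colour image.

#include <vulkan/vulkan.hpp>

void transitionToTransferDst( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ImageMemoryBarrier barrier;
  barrier.setSrcAccessMask( {} )
         .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
         .setOldLayout( vk::ImageLayout::eUndefined )
         .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
         .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
         .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
         .setImage( image )
         .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe,
                       vk::PipelineStageFlagBits::eTransfer,
                       {}, {}, {}, barrier );
}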
); + + template <> + struct CppType + { + using Type = ImageMemoryBarrier; + }; + + class BufferView + { + public: + using CType = VkBufferView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + + public: + VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT + : m_bufferView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_bufferView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT + : m_bufferView( bufferView ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT + { + m_bufferView = bufferView; + return *this; + } +#endif + + BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferView const& ) const = default; +#else + bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView == rhs.m_bufferView; + } + + bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView != rhs.m_bufferView; + } + + bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView < rhs.m_bufferView; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView == VK_NULL_HANDLE; + } + + private: + VkBufferView m_bufferView; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct WriteDescriptorSet + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {}) VULKAN_HPP_NOEXCEPT + : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), pImageInfo( pImageInfo_ ), pBufferInfo( pBufferInfo_ ), pTexelBufferView( pTexelBufferView_ ) + {} + + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, uint32_t dstBinding_, uint32_t dstArrayElement_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferInfo_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & texelBufferView_ = {} ) + : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( static_cast( !imageInfo_.empty() ? imageInfo_.size() : !bufferInfo_.empty() ? 
bufferInfo_.size() : texelBufferView_.size() ) ), descriptorType( descriptorType_ ), pImageInfo( imageInfo_.data() ), pBufferInfo( bufferInfo_.data() ), pTexelBufferView( texelBufferView_.data() ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) == 1 ); +#else + if ( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1 ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::WriteDescriptorSet::WriteDescriptorSet: ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( WriteDescriptorSet ) ); + return *this; + } + + WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT + { + dstSet = dstSet_; + return *this; + } + + WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT + { + pImageInfo = pImageInfo_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + WriteDescriptorSet & setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( imageInfo_.size() ); + pImageInfo = imageInfo_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + pBufferInfo = pBufferInfo_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + WriteDescriptorSet & setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( bufferInfo_.size() ); + pBufferInfo = bufferInfo_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT + { + pTexelBufferView = pTexelBufferView_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + WriteDescriptorSet & setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( texelBufferView_.size() ); + pTexelBufferView = texelBufferView_.data(); + return *this; + } +#endif // 
!defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( WriteDescriptorSet const& ) const = default; +#else + bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( dstSet == rhs.dstSet ) + && ( dstBinding == rhs.dstBinding ) + && ( dstArrayElement == rhs.dstArrayElement ) + && ( descriptorCount == rhs.descriptorCount ) + && ( descriptorType == rhs.descriptorType ) + && ( pImageInfo == rhs.pImageInfo ) + && ( pBufferInfo == rhs.pBufferInfo ) + && ( pTexelBufferView == rhs.pTexelBufferView ); + } + + bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {}; + const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {}; + + }; + static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
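A small usage sketch for the WriteDescriptorSet wrapper above (reviewer illustration, not part of the header): the new setBufferInfo array setter keeps descriptorCount and pBufferInfo consistent, and the enhanced-mode constructor shown earlier enforces that exactly one of the image/buffer/texel-buffer arrays is supplied.

#include <vulkan/vulkan.hpp>

void writeUniformBuffer( vk::Device device, vk::DescriptorSet set,
                         vk::Buffer buffer, vk::DeviceSize range )
{
  vk::DescriptorBufferInfo bufferInfo( buffer, 0, range );
  vk::WriteDescriptorSet write;
  write.setDstSet( set )
       .setDstBinding( 0 )
       .setDescriptorType( vk::DescriptorType::eUniformBuffer )
       .setBufferInfo( bufferInfo );  // sets descriptorCount = 1 and pBufferInfo
  device.updateDescriptorSets( write, {} );  // no descriptor copies
}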
); + + template <> + struct CppType + { + using Type = WriteDescriptorSet; + }; + + class DescriptorUpdateTemplate + { + public: + using CType = VkDescriptorUpdateTemplate; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; + + public: + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = descriptorUpdateTemplate; + return *this; + } +#endif + + DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorUpdateTemplate const& ) const = default; +#else + bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate; + } + + bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate; + } + + bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate == VK_NULL_HANDLE; + } + + private: + VkDescriptorUpdateTemplate m_descriptorUpdateTemplate; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; + + class Event + { + public: + using CType = VkEvent; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; + + public: + VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT + : m_event(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_event(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT + : m_event( event ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT + { + m_event = event; + return *this; + } +#endif + + Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_event = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Event const& ) const = default; +#else + bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event == rhs.m_event; + } + + bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event != rhs.m_event; + } + + bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event < rhs.m_event; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT + { + return m_event; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_event != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_event == VK_NULL_HANDLE; + } + + private: + VkEvent m_event; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Event; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct ImageResolve + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageResolve ) ); + return *this; + } + + ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + + operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageResolve &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageResolve const& ) const = default; +#else + bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( srcSubresource == rhs.srcSubresource ) + && ( srcOffset == rhs.srcOffset ) + && ( dstSubresource == rhs.dstSubresource ) + && ( dstOffset == rhs.dstOffset ) + && ( extent == rhs.extent ); + } + + bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + + }; + static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ImageResolve2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageResolve2KHR( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve2KHR( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageResolve2KHR & operator=( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageResolve2KHR & operator=( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageResolve2KHR ) ); + return *this; + } + + ImageResolve2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageResolve2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageResolve2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + ImageResolve2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageResolve2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + ImageResolve2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + + operator VkImageResolve2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageResolve2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageResolve2KHR const& ) const = default; +#else + bool operator==( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcSubresource == rhs.srcSubresource ) + && ( srcOffset == rhs.srcOffset ) + && ( dstSubresource == rhs.dstSubresource ) + && ( dstOffset == rhs.dstOffset ) + && ( extent == rhs.extent ); + } + + bool operator!=( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + + }; + static_assert( sizeof( ImageResolve2KHR ) == sizeof( VkImageResolve2KHR ), 
"struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageResolve2KHR; + }; + + struct ResolveImageInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ResolveImageInfo2KHR( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ResolveImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ResolveImageInfo2KHR & operator=( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ResolveImageInfo2KHR & operator=( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ResolveImageInfo2KHR ) ); + return *this; + } + + ResolveImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ResolveImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + ResolveImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + ResolveImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + ResolveImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + ResolveImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + ResolveImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ResolveImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( 
regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkResolveImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkResolveImageInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ResolveImageInfo2KHR const& ) const = default; +#else + bool operator==( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcImage == rhs.srcImage ) + && ( srcImageLayout == rhs.srcImageLayout ) + && ( dstImage == rhs.dstImage ) + && ( dstImageLayout == rhs.dstImageLayout ) + && ( regionCount == rhs.regionCount ) + && ( pRegions == rhs.pRegions ); + } + + bool operator!=( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions = {}; + + }; + static_assert( sizeof( ResolveImageInfo2KHR ) == sizeof( VkResolveImageInfo2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ResolveImageInfo2KHR; + }; + + struct PerformanceMarkerInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(uint64_t marker_ = {}) VULKAN_HPP_NOEXCEPT + : marker( marker_ ) + {} + + VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PerformanceMarkerInfoINTEL ) ); + return *this; + } + + PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } + + + operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PerformanceMarkerInfoINTEL const& ) const = default; +#else + bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + 
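Reviewer illustration (not generated code): ResolveImageInfo2KHR above is the VK_KHR_copy_commands2 variant of the same resolve; a sketch assuming that extension is enabled on the device.

#include <vulkan/vulkan.hpp>

void resolveMsaaColor2( vk::CommandBuffer cmd, vk::Image msaaImage,
                        vk::Image resolveTarget, vk::Extent3D extent )
{
  vk::ImageResolve2KHR region;
  region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
        .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
        .setExtent( extent );
  vk::ResolveImageInfo2KHR info( msaaImage, vk::ImageLayout::eTransferSrcOptimal,
                                 resolveTarget, vk::ImageLayout::eTransferDstOptimal,
                                 region );
  cmd.resolveImage2KHR( info );
}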
{ + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( marker == rhs.marker ); + } + + bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL; + const void* pNext = {}; + uint64_t marker = {}; + + }; + static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PerformanceMarkerInfoINTEL; + }; + + struct PerformanceOverrideInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}) VULKAN_HPP_NOEXCEPT + : type( type_ ), enable( enable_ ), parameter( parameter_ ) + {} + + VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PerformanceOverrideInfoINTEL ) ); + return *this; + } + + PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT + { + enable = enable_; + return *this; + } + + PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT + { + parameter = parameter_; + return *this; + } + + + operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PerformanceOverrideInfoINTEL const& ) const = default; +#else + bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( type == rhs.type ) + && ( enable == rhs.enable ) + && ( parameter == rhs.parameter ); + } + + bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = 
VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware; + VULKAN_HPP_NAMESPACE::Bool32 enable = {}; + uint64_t parameter = {}; + + }; + static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PerformanceOverrideInfoINTEL; + }; + + struct PerformanceStreamMarkerInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(uint32_t marker_ = {}) VULKAN_HPP_NOEXCEPT + : marker( marker_ ) + {} + + VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PerformanceStreamMarkerInfoINTEL ) ); + return *this; + } + + PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } + + + operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PerformanceStreamMarkerInfoINTEL const& ) const = default; +#else + bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( marker == rhs.marker ); + } + + bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; + const void* pNext = {}; + uint32_t marker = {}; + + }; + static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PerformanceStreamMarkerInfoINTEL; }; - static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
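Aside for reviewers, not part of the header: the VK_INTEL_performance_query info structs above are passed to the corresponding CommandBuffer helpers; a minimal sketch, assuming the extension is enabled and enhanced mode (exceptions) is in use.

#include <vulkan/vulkan.hpp>

void markFrame( vk::CommandBuffer cmd, uint64_t frameIndex )
{
  vk::PerformanceMarkerInfoINTEL markerInfo;
  markerInfo.setMarker( frameIndex );
  cmd.setPerformanceMarkerINTEL( markerInfo );  // throws on failure in enhanced mode

  vk::PerformanceStreamMarkerInfoINTEL streamMarker;
  streamMarker.setMarker( static_cast<uint32_t>( frameIndex ) );
  cmd.setPerformanceStreamMarkerINTEL( streamMarker );
}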
); struct Viewport { - VULKAN_HPP_CONSTEXPR Viewport( float x_ = {}, - float y_ = {}, - float width_ = {}, - float height_ = {}, - float minDepth_ = {}, - float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , width( width_ ) - , height( height_ ) - , minDepth( minDepth_ ) - , maxDepth( maxDepth_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Viewport(float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {}) VULKAN_HPP_NOEXCEPT + : x( x_ ), y( y_ ), width( width_ ), height( height_ ), minDepth( minDepth_ ), maxDepth( maxDepth_ ) {} + VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - Viewport& operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT + Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( Viewport ) ); return *this; } @@ -34478,6 +34341,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkViewport const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -34488,6 +34352,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Viewport const& ) const = default; +#else bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( x == rhs.x ) @@ -34502,6 +34370,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: float x = {}; @@ -34510,38 +34381,2433 @@ namespace VULKAN_HPP_NAMESPACE float height = {}; float minDepth = {}; float maxDepth = {}; + }; static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
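Illustration only: the Viewport wrapper above in its most common dynamic-state use, assuming the swapchain extent is known.

#include <vulkan/vulkan.hpp>

void setFullViewport( vk::CommandBuffer cmd, vk::Extent2D extent )
{
  vk::Viewport viewport( 0.0f, 0.0f,
                         static_cast<float>( extent.width ),
                         static_cast<float>( extent.height ),
                         0.0f, 1.0f );
  cmd.setViewport( 0, viewport );  // firstViewport = 0, single viewport
}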
); - struct PipelineViewportStateCreateInfo + struct ShadingRatePaletteNV { - VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ = {}, - uint32_t scissorCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , viewportCount( viewportCount_ ) - , pViewports( pViewports_ ) - , scissorCount( scissorCount_ ) - , pScissors( pScissors_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {}) VULKAN_HPP_NOEXCEPT + : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ), pShadingRatePaletteEntries( pShadingRatePaletteEntries_ ) {} - VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo ) - offsetof( PipelineViewportStateCreateInfo, pNext ) ); + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ShadingRatePaletteNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePaletteEntries_ ) + : shadingRatePaletteEntryCount( static_cast( shadingRatePaletteEntries_.size() ) ), pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ShadingRatePaletteNV ) ); + return *this; + } + + ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_; + return *this; + } + + ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT + { + pShadingRatePaletteEntries = pShadingRatePaletteEntries_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ShadingRatePaletteNV & setShadingRatePaletteEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = static_cast( shadingRatePaletteEntries_.size() ); + pShadingRatePaletteEntries = shadingRatePaletteEntries_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ShadingRatePaletteNV const& ) const = default; +#else + bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( 
shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) + && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries ); + } + + bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + uint32_t shadingRatePaletteEntryCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {}; + + }; + static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ViewportWScalingNV + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ViewportWScalingNV(float xcoeff_ = {}, float ycoeff_ = {}) VULKAN_HPP_NOEXCEPT + : xcoeff( xcoeff_ ), ycoeff( ycoeff_ ) + {} + + VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ViewportWScalingNV ) ); + return *this; + } + + ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT + { + xcoeff = xcoeff_; + return *this; + } + + ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT + { + ycoeff = ycoeff_; + return *this; + } + + + operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ViewportWScalingNV const& ) const = default; +#else + bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( xcoeff == rhs.xcoeff ) + && ( ycoeff == rhs.ycoeff ); + } + + bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + float xcoeff = {}; + float ycoeff = {}; + + }; + static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
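Not part of the generated header: ShadingRatePaletteNV and ViewportWScalingNV above are both consumed through per-viewport array commands; a sketch for the W-scaling case, assuming VK_NV_clip_space_w_scaling is enabled.

#include <vulkan/vulkan.hpp>

void setIdentityWScaling( vk::CommandBuffer cmd )
{
  vk::ViewportWScalingNV wScaling( 1.0f, 1.0f );
  cmd.setViewportWScalingNV( 0, wScaling );  // viewport 0, single entry
}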
); + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct StridedBufferRegionKHR + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StridedBufferRegionKHR(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ), offset( offset_ ), stride( stride_ ), size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR StridedBufferRegionKHR( StridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StridedBufferRegionKHR( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + explicit StridedBufferRegionKHR( IndirectCommandsStreamNV const& indirectCommandsStreamNV, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) + : buffer( indirectCommandsStreamNV.buffer ) + , offset( indirectCommandsStreamNV.offset ) + , stride( stride_ ) + , size( size_ ) + {} +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + StridedBufferRegionKHR & operator=( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + StridedBufferRegionKHR & operator=( StridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( StridedBufferRegionKHR ) ); + return *this; + } + + StridedBufferRegionKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + StridedBufferRegionKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + StridedBufferRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + StridedBufferRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + + operator VkStridedBufferRegionKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStridedBufferRegionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( StridedBufferRegionKHR const& ) const = default; +#else + bool operator==( StridedBufferRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( buffer == rhs.buffer ) + && ( offset == rhs.offset ) + && ( stride == rhs.stride ) + && ( size == rhs.size ); + } + + bool operator!=( StridedBufferRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + + }; + static_assert( sizeof( StridedBufferRegionKHR ) == sizeof( VkStridedBufferRegionKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class CommandBuffer + { + public: + using CType = VkCommandBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; + + public: + VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT + : m_commandBuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_commandBuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT + : m_commandBuffer( commandBuffer ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = commandBuffer; + return *this; + } +#endif + + CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CommandBuffer const& ) const = default; +#else + bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer == rhs.m_commandBuffer; + } + + bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer != rhs.m_commandBuffer; + } + + bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer < rhs.m_commandBuffer; + } +#endif + + template + VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT 
) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy const &counterBuffers, ArrayProxy const &counterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy 
const &descriptorSets, ArrayProxy const &dynamicOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy const &buffers, ArrayProxy const &offsets, ArrayProxy const &sizes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers( uint32_t firstBinding, ArrayProxy const &buffers, ArrayProxy const &offsets, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy const &buffers, ArrayProxy const &offsets, ArrayProxy const &sizes, ArrayProxy const &strides, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureKHR( ArrayProxy const &infos, ArrayProxy const &pOffsetInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + 
template + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearAttachments( ArrayProxy const &attachments, ArrayProxy const &rects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy const &ranges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy const &ranges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy const ®ions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy const ®ions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerEndEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void debugMarkerEndEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, 
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, 
VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endConditionalRenderingEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void endConditionalRenderingEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void endRenderPass(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy const &counterBuffers, ArrayProxy const &counterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeCommands( ArrayProxy const &commandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize 
dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy const &memoryBarriers, ArrayProxy const &bufferMemoryBarriers, ArrayProxy const &imageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void preprocessGeneratedCommandsNV( const 
VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy const &values, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy const &descriptorWrites, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, 
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setBlendConstants( const float blendConstants[4], Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setBlendConstants( const float blendConstants[4], Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy const &customSampleOrders, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDeviceMask( uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setDeviceMask( uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy const &discardRectangles, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy const &exclusiveScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setLineWidth( float lineWidth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setLineWidth( float lineWidth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setScissor( uint32_t firstScissor, ArrayProxy const &scissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setScissorWithCountEXT( ArrayProxy const &scissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewport( uint32_t firstViewport, ArrayProxy const &viewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy const &shadingRatePalettes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy const &viewportWScalings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportWithCountEXT( ArrayProxy const &viewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* 
pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize 
callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents( ArrayProxy const &events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy const &memoryBarriers, ArrayProxy const &bufferMemoryBarriers, ArrayProxy const &imageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeAccelerationStructuresPropertiesKHR( ArrayProxy const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeAccelerationStructuresPropertiesNV( ArrayProxy const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits 
pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result end(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type end(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer == VK_NULL_HANDLE; + } + + private: + VkCommandBuffer m_commandBuffer; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct MemoryAllocateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryAllocateInfo(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeIndex_ = {}) VULKAN_HPP_NOEXCEPT + : allocationSize( allocationSize_ ), memoryTypeIndex( memoryTypeIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryAllocateInfo ) ); + return *this; + } + + MemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT + { + allocationSize = allocationSize_; + return *this; + } + + MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryTypeIndex = memoryTypeIndex_; + return *this; + } + + + operator VkMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryAllocateInfo const& ) const = default; +#else + bool operator==( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( allocationSize == rhs.allocationSize ) + && ( memoryTypeIndex == rhs.memoryTypeIndex ); + } + + bool operator!=( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeIndex = {}; + + }; + static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MemoryAllocateInfo; + }; + + class PipelineCache + { + public: + using CType = VkPipelineCache; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + + public: + VULKAN_HPP_CONSTEXPR PipelineCache() VULKAN_HPP_NOEXCEPT + : m_pipelineCache(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_pipelineCache(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT + : m_pipelineCache( pipelineCache ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + PipelineCache & operator=(VkPipelineCache pipelineCache) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = pipelineCache; + return *this; + } +#endif + + PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineCache const& ) const = default; +#else + bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache == rhs.m_pipelineCache; + } + + bool operator!=(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache != rhs.m_pipelineCache; + } + + bool operator<(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache < rhs.m_pipelineCache; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache == VK_NULL_HANDLE; + } + + private: + VkPipelineCache m_pipelineCache; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct EventCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR EventCreateInfo(VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( EventCreateInfo ) ); + return *this; + } + + EventCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + + operator VkEventCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( EventCreateInfo const& ) const = default; +#else + bool operator==( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {}; + + }; + static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = EventCreateInfo; + }; + + struct FenceCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceCreateInfo(VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( FenceCreateInfo ) ); + return *this; + } + + FenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + + operator VkFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( FenceCreateInfo const& ) const = default; +#else + bool operator==( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {}; + + }; + static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = FenceCreateInfo; + }; + + struct FramebufferCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo(VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), renderPass( renderPass_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), width( width_ ), height( height_ ), layers( layers_ ) + {} + + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {} ) + : flags( flags_ ), renderPass( renderPass_ ), attachmentCount( static_cast( attachments_.size() ) ), pAttachments( attachments_.data() ), width( width_ ), height( height_ ), layers( layers_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( FramebufferCreateInfo ) ); + return *this; + } + + FramebufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + FramebufferCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT + { + layers = layers_; + return *this; + } + + + operator VkFramebufferCreateInfo const&() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( FramebufferCreateInfo const& ) const = default; +#else + bool operator==( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( renderPass == rhs.renderPass ) + && ( attachmentCount == rhs.attachmentCount ) + && ( pAttachments == rhs.pAttachments ) + && ( width == rhs.width ) + && ( height == rhs.height ) + && ( layers == rhs.layers ); + } + + bool operator!=( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layers = {}; + + }; + static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = FramebufferCreateInfo; + }; + + struct VertexInputBindingDescription + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription(uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex) VULKAN_HPP_NOEXCEPT + : binding( binding_ ), stride( stride_ ), inputRate( inputRate_ ) + {} + + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VertexInputBindingDescription ) ); + return *this; + } + + VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT + { + inputRate = inputRate_; + return *this; + } + + + operator VkVertexInputBindingDescription const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( VertexInputBindingDescription const& ) const = default; +#else + bool operator==( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( 
binding == rhs.binding ) + && ( stride == rhs.stride ) + && ( inputRate == rhs.inputRate ); + } + + bool operator!=( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + uint32_t binding = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; + + }; + static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct VertexInputAttributeDescription + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription(uint32_t location_ = {}, uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}) VULKAN_HPP_NOEXCEPT + : location( location_ ), binding( binding_ ), format( format_ ), offset( offset_ ) + {} + + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( VertexInputAttributeDescription ) ); + return *this; + } + + VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT + { + location = location_; + return *this; + } + + VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + + operator VkVertexInputAttributeDescription const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( VertexInputAttributeDescription const& ) const = default; +#else + bool operator==( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( location == rhs.location ) + && ( binding == rhs.binding ) + && ( format == rhs.format ) + && ( offset == rhs.offset ); + } + + bool operator!=( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; + + }; + static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PipelineVertexInputStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, uint32_t vertexBindingDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ = {}, uint32_t vertexAttributeDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ), pVertexBindingDescriptions( pVertexBindingDescriptions_ ), vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ), pVertexAttributeDescriptions( pVertexAttributeDescriptions_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDescriptions_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexAttributeDescriptions_ = {} ) + : flags( flags_ ), vertexBindingDescriptionCount( static_cast( vertexBindingDescriptions_.size() ) ), pVertexBindingDescriptions( vertexBindingDescriptions_.data() ), vertexAttributeDescriptionCount( static_cast( vertexAttributeDescriptions_.size() ) ), pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineVertexInputStateCreateInfo ) ); + return *this; + } + + PipelineVertexInputStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDescriptionCount = vertexBindingDescriptionCount_; + return *this; + } + + PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + pVertexBindingDescriptions = pVertexBindingDescriptions_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDescriptionCount = static_cast( vertexBindingDescriptions_.size() 
); + pVertexBindingDescriptions = vertexBindingDescriptions_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; + return *this; + } + + PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + pVertexAttributeDescriptions = pVertexAttributeDescriptions_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeDescriptionCount = static_cast( vertexAttributeDescriptions_.size() ); + pVertexAttributeDescriptions = vertexAttributeDescriptions_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkPipelineVertexInputStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineVertexInputStateCreateInfo const& ) const = default; +#else + bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) + && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) + && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) + && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); + } + + bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; + uint32_t vertexBindingDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions = {}; + uint32_t vertexAttributeDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions = {}; + + }; + static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineVertexInputStateCreateInfo; + }; + + struct PipelineInputAssemblyStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), topology( topology_ ), primitiveRestartEnable( primitiveRestartEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) ); + return *this; + } + + PipelineInputAssemblyStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT + { + topology = topology_; + return *this; + } + + PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT + { + primitiveRestartEnable = primitiveRestartEnable_; + return *this; + } + + + operator VkPipelineInputAssemblyStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineInputAssemblyStateCreateInfo const& ) const = default; +#else + bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( topology == rhs.topology ) + && ( primitiveRestartEnable == rhs.primitiveRestartEnable ); + } + + bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList; + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; + + }; + static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == 
sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineInputAssemblyStateCreateInfo; + }; + + struct PipelineTessellationStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, uint32_t patchControlPoints_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), patchControlPoints( patchControlPoints_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineTessellationStateCreateInfo ) ); + return *this; + } + + PipelineTessellationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT + { + patchControlPoints = patchControlPoints_; + return *this; + } + + + operator VkPipelineTessellationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineTessellationStateCreateInfo const& ) const = default; +#else + bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( patchControlPoints == rhs.patchControlPoints ); + } + + bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; + uint32_t patchControlPoints = {}; + + }; + static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineTessellationStateCreateInfo; + }; + + struct PipelineViewportStateCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ = {}, uint32_t scissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), viewportCount( viewportCount_ ), pViewports( pViewports_ ), scissorCount( scissorCount_ ), pScissors( pScissors_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ = {} ) + : flags( flags_ ), viewportCount( static_cast( viewports_.size() ) ), pViewports( viewports_.data() ), scissorCount( static_cast( scissors_.size() ) ), pScissors( scissors_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineViewportStateCreateInfo ) ); return *this; } @@ -34569,6 +36835,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportStateCreateInfo & setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewports_.size() ); + pViewports = viewports_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT { scissorCount = scissorCount_; @@ -34581,6 +36856,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportStateCreateInfo & setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ ) VULKAN_HPP_NOEXCEPT + { + scissorCount = static_cast( scissors_.size() ); + pScissors = scissors_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPipelineViewportStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -34591,6 +36876,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineViewportStateCreateInfo const& ) const = default; +#else bool operator==( PipelineViewportStateCreateInfo 
const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -34606,6 +36895,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo; @@ -34615,50 +36907,44 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::Viewport* pViewports = {}; uint32_t scissorCount = {}; const VULKAN_HPP_NAMESPACE::Rect2D* pScissors = {}; + }; static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineViewportStateCreateInfo; + }; + struct PipelineRasterizationStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, - VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, - VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, - VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, - VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, - float depthBiasConstantFactor_ = {}, - float depthBiasClamp_ = {}, - float depthBiasSlopeFactor_ = {}, - float lineWidth_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , depthClampEnable( depthClampEnable_ ) - , rasterizerDiscardEnable( rasterizerDiscardEnable_ ) - , polygonMode( polygonMode_ ) - , cullMode( cullMode_ ) - , frontFace( frontFace_ ) - , depthBiasEnable( depthBiasEnable_ ) - , depthBiasConstantFactor( depthBiasConstantFactor_ ) - , depthBiasClamp( depthBiasClamp_ ) - , depthBiasSlopeFactor( depthBiasSlopeFactor_ ) - , lineWidth( lineWidth_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, float depthBiasConstantFactor_ = {}, float depthBiasClamp_ = {}, float depthBiasSlopeFactor_ = {}, float lineWidth_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), depthClampEnable( depthClampEnable_ ), rasterizerDiscardEnable( rasterizerDiscardEnable_ ), polygonMode( polygonMode_ ), cullMode( cullMode_ ), frontFace( frontFace_ ), depthBiasEnable( depthBiasEnable_ ), depthBiasConstantFactor( depthBiasConstantFactor_ ), depthBiasClamp( depthBiasClamp_ ), depthBiasSlopeFactor( depthBiasSlopeFactor_ ), lineWidth( lineWidth_ ) {} - VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo ) - offsetof( 
PipelineRasterizationStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineRasterizationStateCreateInfo ) ); return *this; } @@ -34734,6 +37020,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineRasterizationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -34744,6 +37031,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineRasterizationStateCreateInfo const& ) const = default; +#else bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -34765,6 +37056,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo; @@ -34780,42 +37074,44 @@ namespace VULKAN_HPP_NAMESPACE float depthBiasClamp = {}; float depthBiasSlopeFactor = {}; float lineWidth = {}; + }; static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PipelineRasterizationStateCreateInfo; + }; + struct PipelineMultisampleStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, - float minSampleShading_ = {}, - const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , rasterizationSamples( rasterizationSamples_ ) - , sampleShadingEnable( sampleShadingEnable_ ) - , minSampleShading( minSampleShading_ ) - , pSampleMask( pSampleMask_ ) - , alphaToCoverageEnable( alphaToCoverageEnable_ ) - , alphaToOneEnable( alphaToOneEnable_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, float minSampleShading_ = {}, const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), rasterizationSamples( rasterizationSamples_ ), sampleShadingEnable( sampleShadingEnable_ ), minSampleShading( minSampleShading_ ), pSampleMask( pSampleMask_ ), alphaToCoverageEnable( alphaToCoverageEnable_ ), alphaToOneEnable( alphaToOneEnable_ ) {} - VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo ) - offsetof( PipelineMultisampleStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineMultisampleStateCreateInfo ) ); return *this; } @@ -34867,6 +37163,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineMultisampleStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -34877,6 +37174,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( 
PipelineMultisampleStateCreateInfo const& ) const = default; +#else bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -34894,6 +37195,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; @@ -34905,36 +37209,43 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask = {}; VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {}; VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {}; + }; static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineMultisampleStateCreateInfo; + }; + struct StencilOpState { - VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, - VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, - VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, - VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, - uint32_t compareMask_ = {}, - uint32_t writeMask_ = {}, - uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT - : failOp( failOp_ ) - , passOp( passOp_ ) - , depthFailOp( depthFailOp_ ) - , compareOp( compareOp_ ) - , compareMask( compareMask_ ) - , writeMask( writeMask_ ) - , reference( reference_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StencilOpState(VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, uint32_t compareMask_ = {}, uint32_t writeMask_ = {}, uint32_t reference_ = {}) VULKAN_HPP_NOEXCEPT + : failOp( failOp_ ), passOp( passOp_ ), depthFailOp( depthFailOp_ ), compareOp( compareOp_ ), compareMask( compareMask_ ), writeMask( writeMask_ ), reference( reference_ ) {} + VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - StencilOpState& operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT + StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( StencilOpState ) ); return *this; } @@ -34980,6 +37291,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkStencilOpState const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -34990,6 +37302,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( StencilOpState const& ) const = default; +#else bool operator==( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( failOp == rhs.failOp ) @@ -35005,6 +37321,9 @@ 
namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; @@ -35014,48 +37333,38 @@ namespace VULKAN_HPP_NAMESPACE uint32_t compareMask = {}; uint32_t writeMask = {}; uint32_t reference = {}; + }; static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PipelineDepthStencilStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, - VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, - VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, - VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, - VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, - float minDepthBounds_ = {}, - float maxDepthBounds_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , depthTestEnable( depthTestEnable_ ) - , depthWriteEnable( depthWriteEnable_ ) - , depthCompareOp( depthCompareOp_ ) - , depthBoundsTestEnable( depthBoundsTestEnable_ ) - , stencilTestEnable( stencilTestEnable_ ) - , front( front_ ) - , back( back_ ) - , minDepthBounds( minDepthBounds_ ) - , maxDepthBounds( maxDepthBounds_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, float minDepthBounds_ = {}, float maxDepthBounds_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), depthTestEnable( depthTestEnable_ ), depthWriteEnable( depthWriteEnable_ ), depthCompareOp( depthCompareOp_ ), depthBoundsTestEnable( depthBoundsTestEnable_ ), stencilTestEnable( stencilTestEnable_ ), front( front_ ), back( back_ ), minDepthBounds( minDepthBounds_ ), maxDepthBounds( maxDepthBounds_ ) {} - VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo ) - offsetof( PipelineDepthStencilStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & operator=( 
VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) ); return *this; } @@ -35101,13 +37410,13 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState front_ ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT { front = front_; return *this; } - PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState back_ ) VULKAN_HPP_NOEXCEPT + PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT { back = back_; return *this; @@ -35125,6 +37434,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineDepthStencilStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -35135,6 +37445,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineDepthStencilStateCreateInfo const& ) const = default; +#else bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -35155,6 +37469,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo; @@ -35169,38 +37486,43 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::StencilOpState back = {}; float minDepthBounds = {}; float maxDepthBounds = {}; + }; static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PipelineDepthStencilStateCreateInfo; + }; + struct PipelineColorBlendAttachmentState { - VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, - VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, - VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, - VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, - VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, - VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, - VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, - VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT - : blendEnable( blendEnable_ ) - , srcColorBlendFactor( srcColorBlendFactor_ ) - , dstColorBlendFactor( dstColorBlendFactor_ ) - , colorBlendOp( colorBlendOp_ ) - , srcAlphaBlendFactor( srcAlphaBlendFactor_ ) - , dstAlphaBlendFactor( dstAlphaBlendFactor_ ) - , alphaBlendOp( alphaBlendOp_ ) - , colorWriteMask( colorWriteMask_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState(VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {}) VULKAN_HPP_NOEXCEPT + : blendEnable( blendEnable_ ), srcColorBlendFactor( srcColorBlendFactor_ ), dstColorBlendFactor( dstColorBlendFactor_ ), colorBlendOp( colorBlendOp_ ), srcAlphaBlendFactor( srcAlphaBlendFactor_ ), dstAlphaBlendFactor( dstAlphaBlendFactor_ ), alphaBlendOp( alphaBlendOp_ ), colorWriteMask( colorWriteMask_ ) {} + VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineColorBlendAttachmentState ) ); return *this; } @@ -35252,6 +37574,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineColorBlendAttachmentState const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -35262,6 +37585,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineColorBlendAttachmentState const& ) const = default; +#else bool operator==( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( blendEnable == rhs.blendEnable ) @@ -35278,6 +37605,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {}; @@ -35288,42 +37618,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {}; + }; static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PipelineColorBlendStateCreateInfo { - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, - VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ = {}, - std::array const& blendConstants_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , logicOpEnable( logicOpEnable_ ) - , logicOp( logicOp_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , blendConstants{} - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( blendConstants, blendConstants_ ); - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo; - VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo ) - offsetof( PipelineColorBlendStateCreateInfo, pNext ) ); - return *this; - } +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ = {}, std::array const& blendConstants_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), blendConstants( blendConstants_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, VULKAN_HPP_NAMESPACE::LogicOp logicOp_, 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, std::array const& blendConstants_ = {} ) + : flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( static_cast( attachments_.size() ) ), pAttachments( attachments_.data() ), blendConstants( blendConstants_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineColorBlendStateCreateInfo ) ); return *this; } @@ -35363,11 +37695,21 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineColorBlendStateCreateInfo & setBlendConstants( std::array blendConstants_ ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineColorBlendStateCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT { - memcpy( blendConstants, blendConstants_.data(), 4 * sizeof( float ) ); + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); return *this; } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + PipelineColorBlendStateCreateInfo & setBlendConstants( std::array blendConstants_ ) VULKAN_HPP_NOEXCEPT + { + blendConstants = blendConstants_; + return *this; + } + operator VkPipelineColorBlendStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT { @@ -35379,6 +37721,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineColorBlendStateCreateInfo const& ) const = default; +#else bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -35388,13 +37734,16 @@ namespace VULKAN_HPP_NAMESPACE && ( logicOp == rhs.logicOp ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) - && ( memcmp( blendConstants, rhs.blendConstants, 4 * sizeof( float ) ) == 0 ); + && ( blendConstants == rhs.blendConstants ); } bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo; @@ -35404,35 +37753,51 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear; uint32_t attachmentCount = {}; const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments = {}; - float blendConstants[4] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D blendConstants = {}; + }; static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PipelineColorBlendStateCreateInfo; + }; + struct PipelineDynamicStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, - uint32_t dynamicStateCount_ = {}, - const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , dynamicStateCount( dynamicStateCount_ ) - , pDynamicStates( pDynamicStates_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, uint32_t dynamicStateCount_ = {}, const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), dynamicStateCount( dynamicStateCount_ ), pDynamicStates( pDynamicStates_ ) {} - VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo ) - offsetof( PipelineDynamicStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_ ) + : flags( flags_ ), dynamicStateCount( static_cast( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineDynamicStateCreateInfo ) ); return *this; } @@ -35460,6 +37825,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineDynamicStateCreateInfo & setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT + { + dynamicStateCount = static_cast( dynamicStates_.size() ); + pDynamicStates = dynamicStates_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPipelineDynamicStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -35470,6 +37845,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineDynamicStateCreateInfo const& ) const = default; +#else bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -35483,6 +37862,9 @@ namespace VULKAN_HPP_NAMESPACE 
{ return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; @@ -35490,62 +37872,50 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {}; uint32_t dynamicStateCount = {}; const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates = {}; + }; static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineDynamicStateCreateInfo; + }; + struct GraphicsPipelineCreateInfo { - VULKAN_HPP_CONSTEXPR GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t subpass_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , pVertexInputState( pVertexInputState_ ) - , pInputAssemblyState( pInputAssemblyState_ ) - , pTessellationState( pTessellationState_ ) - , pViewportState( pViewportState_ ) - , pRasterizationState( pRasterizationState_ ) - , pMultisampleState( pMultisampleState_ ) - , pDepthStencilState( pDepthStencilState_ ) - , pColorBlendState( pColorBlendState_ ) - , pDynamicState( pDynamicState_ ) - , layout( layout_ ) - , renderPass( renderPass_ ) - , subpass( subpass_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {}, const 
VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ ) {} - VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & operator=( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo ) - offsetof( GraphicsPipelineCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} ) + : flags( flags_ ), stageCount( static_cast( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), 
basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( GraphicsPipelineCreateInfo ) ); return *this; } @@ -35573,6 +37943,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + GraphicsPipelineCreateInfo & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + GraphicsPipelineCreateInfo & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT { pVertexInputState = pVertexInputState_; @@ -35657,6 +38036,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkGraphicsPipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -35667,6 +38047,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( GraphicsPipelineCreateInfo const& ) const = default; +#else bool operator==( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -35694,6 +38078,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo; @@ -35715,562 +38102,50 @@ namespace VULKAN_HPP_NAMESPACE uint32_t subpass = {}; VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; int32_t basePipelineIndex = {}; + }; static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct XYColorEXT + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, - float y_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - {} - - XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - XYColorEXT& operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT - { - x = x_; - return *this; - } - - XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT - { - y = y_; - return *this; - } - - operator VkXYColorEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( x == rhs.x ) - && ( y == rhs.y ); - } - - bool operator!=( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - float x = {}; - float y = {}; + using Type = GraphicsPipelineCreateInfo; }; - static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct HdrMetadataEXT - { - VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, - VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, - float maxLuminance_ = {}, - float minLuminance_ = {}, - float maxContentLightLevel_ = {}, - float maxFrameAverageLightLevel_ = {} ) VULKAN_HPP_NOEXCEPT - : displayPrimaryRed( displayPrimaryRed_ ) - , displayPrimaryGreen( displayPrimaryGreen_ ) - , displayPrimaryBlue( displayPrimaryBlue_ ) - , whitePoint( whitePoint_ ) - , maxLuminance( maxLuminance_ ) - , minLuminance( minLuminance_ ) - , maxContentLightLevel( maxContentLightLevel_ ) - , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ ) - {} - - VULKAN_HPP_NAMESPACE::HdrMetadataEXT & operator=( VULKAN_HPP_NAMESPACE::HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::HdrMetadataEXT ) - offsetof( HdrMetadataEXT, pNext ) ); - return *this; - } - - HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - HdrMetadataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT - { - displayPrimaryRed = displayPrimaryRed_; - return *this; - } - - HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT - { - displayPrimaryGreen = displayPrimaryGreen_; - return *this; - } - - HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT - { - displayPrimaryBlue = displayPrimaryBlue_; - return *this; - } - - HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ ) VULKAN_HPP_NOEXCEPT - { - whitePoint = whitePoint_; - return *this; - } - - HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT - { - maxLuminance = maxLuminance_; - return *this; - } - - HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT - { - minLuminance = minLuminance_; - return *this; - } - - HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT - { - maxContentLightLevel = maxContentLightLevel_; - return *this; - } - - HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT - { - maxFrameAverageLightLevel = maxFrameAverageLightLevel_; - return *this; - } - - operator VkHdrMetadataEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayPrimaryRed == rhs.displayPrimaryRed ) - && ( displayPrimaryGreen == rhs.displayPrimaryGreen ) - && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) - && ( whitePoint == rhs.whitePoint ) - && ( maxLuminance == rhs.maxLuminance ) - && ( minLuminance == rhs.minLuminance ) - && ( maxContentLightLevel == rhs.maxContentLightLevel ) - 
&& ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); - } - - bool operator!=( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {}; - float maxLuminance = {}; - float minLuminance = {}; - float maxContentLightLevel = {}; - float maxFrameAverageLightLevel = {}; - }; - static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct HeadlessSurfaceCreateInfoEXT - { - VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - {} - - VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT ) - offsetof( HeadlessSurfaceCreateInfoEXT, pNext ) ); - return *this; - } - - HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - HeadlessSurfaceCreateInfoEXT& operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - HeadlessSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - operator VkHeadlessSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); - } - - bool operator!=( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {}; - }; - static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_IOS_MVK - - struct IOSSurfaceCreateInfoMVK - { - VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, - const void* pView_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pView( pView_ ) - {} - - VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & operator=( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK ) - offsetof( IOSSurfaceCreateInfoMVK, pNext ) ); - return *this; - } - - IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - IOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - IOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT - { - pView = pView_; - return *this; - } - - operator VkIOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pView == rhs.pView ); - } - - bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {}; - const void* pView = {}; - }; - static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - - struct ImageBlit - { - VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - std::array const& srcOffsets_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - std::array const& dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubresource( srcSubresource_ ) - , srcOffsets{} - , dstSubresource( dstSubresource_ ) - , dstOffsets{} - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( srcOffsets, srcOffsets_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( dstOffsets, dstOffsets_ ); - } - - ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageBlit& operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT - { - srcSubresource = srcSubresource_; - return *this; - } - - ImageBlit & setSrcOffsets( std::array srcOffsets_ ) VULKAN_HPP_NOEXCEPT - { - memcpy( srcOffsets, srcOffsets_.data(), 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ); - return *this; - } - - ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT - { - dstSubresource = dstSubresource_; - return *this; - } - - ImageBlit & setDstOffsets( std::array dstOffsets_ ) VULKAN_HPP_NOEXCEPT - { - memcpy( dstOffsets, dstOffsets_.data(), 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ); - return *this; - } - - operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageBlit &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( srcSubresource == rhs.srcSubresource ) - && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ) == 0 ) - && ( dstSubresource == rhs.dstSubresource ) - && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ) == 0 ); - } - - bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D srcOffsets[2] = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D dstOffsets[2] = {}; - }; - static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageCopy - { - VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - , extent( extent_ ) - {} - - ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageCopy& operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT - { - srcSubresource = srcSubresource_; - return *this; - } - - ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ ) VULKAN_HPP_NOEXCEPT - { - srcOffset = srcOffset_; - return *this; - } - - ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT - { - dstSubresource = dstSubresource_; - return *this; - } - - ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ ) VULKAN_HPP_NOEXCEPT - { - dstOffset = dstOffset_; - return *this; - } - - ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT - { - extent = extent_; - return *this; - } - - operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageCopy &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( srcSubresource == rhs.srcSubresource ) - && ( srcOffset == rhs.srcOffset ) - && ( dstSubresource == rhs.dstSubresource ) - && ( dstOffset == rhs.dstOffset ) - && ( extent == rhs.extent ); - } - - bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; - }; - static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ImageCreateInfo { - VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, - uint32_t mipLevels_ = {}, - uint32_t arrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t* pQueueFamilyIndices_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , imageType( imageType_ ) - , format( format_ ) - , extent( extent_ ) - , mipLevels( mipLevels_ ) - , arrayLayers( arrayLayers_ ) - , samples( samples_ ) - , tiling( tiling_ ) - , usage( usage_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) - , initialLayout( initialLayout_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCreateInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, uint32_t mipLevels_ = {}, uint32_t arrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ), initialLayout( initialLayout_ ) {} - VULKAN_HPP_NAMESPACE::ImageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) - offsetof( ImageCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - ImageCreateInfo& operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageType imageType_, VULKAN_HPP_NAMESPACE::Format format_, VULKAN_HPP_NAMESPACE::Extent3D extent_, uint32_t 
mipLevels_, uint32_t arrayLayers_, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, VULKAN_HPP_NAMESPACE::ImageTiling tiling_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + : flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), initialLayout( initialLayout_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageCreateInfo ) ); return *this; } @@ -36298,7 +38173,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT + ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; @@ -36352,12 +38227,22 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ImageCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT { initialLayout = initialLayout_; return *this; } + operator VkImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -36368,6 +38253,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageCreateInfo const& ) const = default; +#else bool operator==( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -36391,6 +38280,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo; @@ -36408,1221 +38300,44 @@ namespace VULKAN_HPP_NAMESPACE uint32_t queueFamilyIndexCount = {}; const uint32_t* pQueueFamilyIndices = {}; VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct SubresourceLayout + template <> + struct CppType { - SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT - : offset( offset_ ) - , size( size_ ) - , rowPitch( rowPitch_ ) - , arrayPitch( arrayPitch_ ) - , depthPitch( depthPitch_ ) - {} - - SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubresourceLayout& operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkSubresourceLayout const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( offset == rhs.offset ) - && ( size == rhs.size ) - && ( rowPitch == rhs.rowPitch ) - && ( arrayPitch == rhs.arrayPitch ) - && ( depthPitch == rhs.depthPitch ); - } - - bool operator!=( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {}; - VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {}; - VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {}; + using Type = ImageCreateInfo; }; - static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageDrmFormatModifierExplicitCreateInfoEXT - { - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {}, - uint32_t drmFormatModifierPlaneCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifier( drmFormatModifier_ ) - , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) - , pPlaneLayouts( pPlaneLayouts_ ) - {} - - VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT ) - offsetof( ImageDrmFormatModifierExplicitCreateInfoEXT, pNext ) ); - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT& operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT - { - drmFormatModifier = drmFormatModifier_; - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT - { - drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT - { - pPlaneLayouts = pPlaneLayouts_; - return *this; - } - - operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifier == rhs.drmFormatModifier ) - && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) - && ( pPlaneLayouts == rhs.pPlaneLayouts ); - } - - bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; - const void* pNext = {}; - uint64_t drmFormatModifier = {}; - uint32_t drmFormatModifierPlaneCount = {}; - const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts = {}; - }; - static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageDrmFormatModifierListCreateInfoEXT - { - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {}, - const uint64_t* pDrmFormatModifiers_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifierCount( drmFormatModifierCount_ ) - , pDrmFormatModifiers( pDrmFormatModifiers_ ) - {} - - VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT ) - offsetof( ImageDrmFormatModifierListCreateInfoEXT, pNext ) ); - return *this; - } - - ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageDrmFormatModifierListCreateInfoEXT& operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT - { - drmFormatModifierCount = drmFormatModifierCount_; - return *this; - } - - ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT - { - pDrmFormatModifiers = pDrmFormatModifiers_; - return *this; - } - - operator VkImageDrmFormatModifierListCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) - && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); - } - - bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; - const void* pNext = {}; - uint32_t drmFormatModifierCount = {}; - const uint64_t* pDrmFormatModifiers = {}; - }; - static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageDrmFormatModifierPropertiesEXT - { - ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifier( drmFormatModifier_ ) - {} - - VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT ) - offsetof( ImageDrmFormatModifierPropertiesEXT, pNext ) ); - return *this; - } - - ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageDrmFormatModifierPropertiesEXT& operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkImageDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifier == rhs.drmFormatModifier ); - } - - bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; - void* pNext = {}; - uint64_t drmFormatModifier = {}; - }; - static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageFormatListCreateInfo - { - VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, - const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT - : viewFormatCount( viewFormatCount_ ) - , pViewFormats( pViewFormats_ ) - {} - - VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo ) - offsetof( ImageFormatListCreateInfo, pNext ) ); - return *this; - } - - ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageFormatListCreateInfo& operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageFormatListCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT - { - viewFormatCount = viewFormatCount_; - return *this; - } - - ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT - { - pViewFormats = pViewFormats_; - return *this; - } - - operator VkImageFormatListCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( viewFormatCount == rhs.viewFormatCount ) - && ( pViewFormats == rhs.pViewFormats ); - } - - bool operator!=( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo; - const void* pNext = {}; - uint32_t viewFormatCount = {}; - const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {}; - }; - static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageFormatProperties2 - { - ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : imageFormatProperties( imageFormatProperties_ ) - {} - - VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & operator=( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 ) - offsetof( ImageFormatProperties2, pNext ) ); - return *this; - } - - ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageFormatProperties2& operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( imageFormatProperties == rhs.imageFormatProperties ); - } - - bool operator!=( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; - }; - static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ImageSubresourceRange - { - VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - uint32_t baseMipLevel_ = {}, - uint32_t levelCount_ = {}, - uint32_t baseArrayLayer_ = {}, - uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , baseMipLevel( baseMipLevel_ ) - , levelCount( levelCount_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) - {} - - ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT - { - aspectMask = aspectMask_; - return *this; - } - - ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT - { - baseMipLevel = baseMipLevel_; - return *this; - } - - ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT - { - levelCount = levelCount_; - return *this; - } - - ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT - { - baseArrayLayer = baseArrayLayer_; - return *this; - } - - ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT - { - layerCount = layerCount_; - return *this; - } - - operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( aspectMask 
== rhs.aspectMask ) - && ( baseMipLevel == rhs.baseMipLevel ) - && ( levelCount == rhs.levelCount ) - && ( baseArrayLayer == rhs.baseArrayLayer ) - && ( layerCount == rhs.layerCount ); - } - - bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - uint32_t baseMipLevel = {}; - uint32_t levelCount = {}; - uint32_t baseArrayLayer = {}; - uint32_t layerCount = {}; - }; - static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ImageMemoryBarrier - { - VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - uint32_t srcQueueFamilyIndex_ = {}, - uint32_t dstQueueFamilyIndex_ = {}, - VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT - : srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , oldLayout( oldLayout_ ) - , newLayout( newLayout_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , image( image_ ) - , subresourceRange( subresourceRange_ ) - {} - - VULKAN_HPP_NAMESPACE::ImageMemoryBarrier & operator=( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier ) - offsetof( ImageMemoryBarrier, pNext ) ); - return *this; - } - - ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - srcAccessMask = srcAccessMask_; - return *this; - } - - ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - dstAccessMask = dstAccessMask_; - return *this; - } - - ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT - { - oldLayout = oldLayout_; - return *this; - } - - ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT - { - newLayout = newLayout_; - return *this; - } - - ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT - { - srcQueueFamilyIndex = srcQueueFamilyIndex_; - return *this; - } - - ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT - { - dstQueueFamilyIndex = dstQueueFamilyIndex_; - return *this; - } - - ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT - { - image = image_; - return *this; - } - - ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ ) VULKAN_HPP_NOEXCEPT - { - subresourceRange = 
subresourceRange_; - return *this; - } - - operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( oldLayout == rhs.oldLayout ) - && ( newLayout == rhs.newLayout ) - && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) - && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) - && ( image == rhs.image ) - && ( subresourceRange == rhs.subresourceRange ); - } - - bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - uint32_t srcQueueFamilyIndex = {}; - uint32_t dstQueueFamilyIndex = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; - }; - static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ImageMemoryRequirementsInfo2 - { - VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - {} - - VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & operator=( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 ) - offsetof( ImageMemoryRequirementsInfo2, pNext ) ); - return *this; - } - - ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT - { - image = image_; - return *this; - } - - operator VkImageMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ); - } - - bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - }; - static_assert( sizeof( 
ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_FUCHSIA - - struct ImagePipeSurfaceCreateInfoFUCHSIA - { - VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, - zx_handle_t imagePipeHandle_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , imagePipeHandle( imagePipeHandle_ ) - {} - - VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA ) - offsetof( ImagePipeSurfaceCreateInfoFUCHSIA, pNext ) ); - return *this; - } - - ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImagePipeSurfaceCreateInfoFUCHSIA& operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT - { - imagePipeHandle = imagePipeHandle_; - return *this; - } - - operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( imagePipeHandle == rhs.imagePipeHandle ); - } - - bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {}; - zx_handle_t imagePipeHandle = {}; - }; - static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - - struct ImagePlaneMemoryRequirementsInfo - { - VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT - : planeAspect( planeAspect_ ) - {} - - VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo & operator=( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo ) - offsetof( ImagePlaneMemoryRequirementsInfo, pNext ) ); - return *this; - } - - ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImagePlaneMemoryRequirementsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT - { - planeAspect = planeAspect_; - return *this; - } - - operator VkImagePlaneMemoryRequirementsInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( planeAspect == rhs.planeAspect ); - } - - bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; - }; - static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageResolve - { - VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - , extent( extent_ ) - {} - - ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageResolve& operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT - { - srcSubresource = srcSubresource_; - return *this; - } - - ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ ) VULKAN_HPP_NOEXCEPT - { - srcOffset = srcOffset_; - return *this; - } - - ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT - { - dstSubresource = dstSubresource_; - return *this; - } - - ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ ) VULKAN_HPP_NOEXCEPT - { - dstOffset = dstOffset_; - return *this; - } - - ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT - { - extent = extent_; - return *this; - } - - operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageResolve &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( srcSubresource == rhs.srcSubresource ) - && ( srcOffset == rhs.srcOffset ) - && ( dstSubresource == rhs.dstSubresource ) - && ( dstOffset == rhs.dstOffset ) - && ( extent == rhs.extent ); - } - - bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; - }; - static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageSparseMemoryRequirementsInfo2 - { - VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - {} - - VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & operator=( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 ) - offsetof( ImageSparseMemoryRequirementsInfo2, pNext ) ); - return *this; - } - - ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageSparseMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT - { - image = image_; - return *this; - } - - operator VkImageSparseMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ); - } - - bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - }; - static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageStencilUsageCreateInfo - { - VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {} ) VULKAN_HPP_NOEXCEPT - : stencilUsage( stencilUsage_ ) - {} - - VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo ) - offsetof( ImageStencilUsageCreateInfo, pNext ) ); - return *this; - } - - ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageStencilUsageCreateInfo& operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageStencilUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT - { - stencilUsage = stencilUsage_; - return *this; - } - - operator VkImageStencilUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stencilUsage == rhs.stencilUsage ); - } - - bool operator!=( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {}; - }; - static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageSwapchainCreateInfoKHR - { - VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {} ) VULKAN_HPP_NOEXCEPT - : swapchain( swapchain_ ) - {} - - VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR ) - offsetof( ImageSwapchainCreateInfoKHR, pNext ) ); - return *this; - } - - ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT - { - swapchain = swapchain_; - return *this; - } - - operator VkImageSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchain == rhs.swapchain ); - } - - bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; - }; - static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageViewASTCDecodeModeEXT - { - VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT - : decodeMode( decodeMode_ ) - {} - - VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT & operator=( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT ) - offsetof( ImageViewASTCDecodeModeEXT, pNext ) ); - return *this; - } - - ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageViewASTCDecodeModeEXT& operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageViewASTCDecodeModeEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT - { - decodeMode = decodeMode_; - return *this; - } - - operator VkImageViewASTCDecodeModeEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( decodeMode == rhs.decodeMode ); - } - - bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined; - }; - static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ImageViewCreateInfo { - VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , image( image_ ) - , viewType( viewType_ ) - , format( format_ ) - , components( components_ ) - , subresourceRange( subresourceRange_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewCreateInfo(VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), image( image_ ), viewType( viewType_ ), format( format_ ), components( components_ ), subresourceRange( subresourceRange_ ) {} - VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) - offsetof( ImageViewCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageViewCreateInfo ) ); return *this; } @@ -37656,18 +38371,19 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping components_ ) VULKAN_HPP_NOEXCEPT + ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT { components = components_; return *this; } - ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ ) VULKAN_HPP_NOEXCEPT + ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return *this; } + operator VkImageViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -37678,6 +38394,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageViewCreateInfo const& ) const = default; +#else bool operator==( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == 
rhs.sType ) @@ -37694,6 +38414,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo; @@ -37704,34 +38427,6270 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct ImageViewHandleInfoNVX + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - VULKAN_HPP_NAMESPACE::Sampler sampler_ = {} ) VULKAN_HPP_NOEXCEPT - : imageView( imageView_ ) - , descriptorType( descriptorType_ ) - , sampler( sampler_ ) + using Type = ImageViewCreateInfo; + }; + + struct IndirectCommandsLayoutTokenNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV(VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, uint32_t stream_ = {}, uint32_t offset_ = {}, uint32_t vertexBindingUnit_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, uint32_t pushconstantOffset_ = {}, uint32_t pushconstantSize_ = {}, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, uint32_t indexTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ = {}, const uint32_t* pIndexTypeValues_ = {}) VULKAN_HPP_NOEXCEPT + : tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( indexTypeCount_ ), pIndexTypes( pIndexTypes_ ), pIndexTypeValues( pIndexTypeValues_ ) {} - VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & operator=( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX ) - offsetof( ImageViewHandleInfoNVX, pNext ) ); + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, uint32_t stream_, uint32_t offset_, uint32_t vertexBindingUnit_, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, 
VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, uint32_t pushconstantOffset_, uint32_t pushconstantSize_, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ = {} ) + : tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( static_cast( indexTypes_.size() ) ), pIndexTypes( indexTypes_.data() ), pIndexTypeValues( indexTypeValues_.data() ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() ); +#else + if ( indexTypes_.size() != indexTypeValues_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( IndirectCommandsLayoutTokenNV ) ); + return *this; + } + + IndirectCommandsLayoutTokenNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT + { + tokenType = tokenType_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT + { + stream = stream_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingUnit = vertexBindingUnit_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexDynamicStride = vertexDynamicStride_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantPipelineLayout = pushconstantPipelineLayout_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantShaderStageFlags = pushconstantShaderStageFlags_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantOffset = pushconstantOffset_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantSize = pushconstantSize_; + return *this; + } + + 
IndirectCommandsLayoutTokenNV & setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT + { + indirectStateFlags = indirectStateFlags_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = indexTypeCount_; + return *this; + } + + IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypes = pIndexTypes_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + IndirectCommandsLayoutTokenNV & setIndexTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = static_cast( indexTypes_.size() ); + pIndexTypes = indexTypes_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t* pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypeValues = pIndexTypeValues_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + IndirectCommandsLayoutTokenNV & setIndexTypeValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = static_cast( indexTypeValues_.size() ); + pIndexTypeValues = indexTypeValues_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkIndirectCommandsLayoutTokenNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( IndirectCommandsLayoutTokenNV const& ) const = default; +#else + bool operator==( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( tokenType == rhs.tokenType ) + && ( stream == rhs.stream ) + && ( offset == rhs.offset ) + && ( vertexBindingUnit == rhs.vertexBindingUnit ) + && ( vertexDynamicStride == rhs.vertexDynamicStride ) + && ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) + && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) + && ( pushconstantOffset == rhs.pushconstantOffset ) + && ( pushconstantSize == rhs.pushconstantSize ) + && ( indirectStateFlags == rhs.indirectStateFlags ) + && ( indexTypeCount == rhs.indexTypeCount ) + && ( pIndexTypes == rhs.pIndexTypes ) + && ( pIndexTypeValues == rhs.pIndexTypeValues ); + } + + bool operator!=( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup; + uint32_t stream = {}; + uint32_t offset = {}; + uint32_t vertexBindingUnit = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {}; + uint32_t pushconstantOffset = {}; + uint32_t pushconstantSize = {}; + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {}; + uint32_t indexTypeCount = {}; + const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes = 
{}; + const uint32_t* pIndexTypeValues = {}; + + }; + static_assert( sizeof( IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = IndirectCommandsLayoutTokenNV; + }; + + struct IndirectCommandsLayoutCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t tokenCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ = {}, uint32_t streamCount_ = {}, const uint32_t* pStreamStrides_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( tokenCount_ ), pTokens( pTokens_ ), streamCount( streamCount_ ), pStreamStrides( pStreamStrides_ ) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ = {} ) + : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( static_cast( tokens_.size() ) ), pTokens( tokens_.data() ), streamCount( static_cast( streamStrides_.size() ) ), pStreamStrides( streamStrides_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( IndirectCommandsLayoutCreateInfoNV ) ); + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = tokenCount_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ ) VULKAN_HPP_NOEXCEPT + { + pTokens = pTokens_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) 
+ IndirectCommandsLayoutCreateInfoNV & setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = static_cast( tokens_.size() ); + pTokens = tokens_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = streamCount_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t* pStreamStrides_ ) VULKAN_HPP_NOEXCEPT + { + pStreamStrides = pStreamStrides_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + IndirectCommandsLayoutCreateInfoNV & setStreamStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = static_cast( streamStrides_.size() ); + pStreamStrides = streamStrides_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkIndirectCommandsLayoutCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( IndirectCommandsLayoutCreateInfoNV const& ) const = default; +#else + bool operator==( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( tokenCount == rhs.tokenCount ) + && ( pTokens == rhs.pTokens ) + && ( streamCount == rhs.streamCount ) + && ( pStreamStrides == rhs.pStreamStrides ); + } + + bool operator!=( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t tokenCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens = {}; + uint32_t streamCount = {}; + const uint32_t* pStreamStrides = {}; + + }; + static_assert( sizeof( IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = IndirectCommandsLayoutCreateInfoNV; + }; + + struct PipelineCacheCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, size_t initialDataSize_ = {}, const void* pInitialData_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), initialDataSize( initialDataSize_ ), pInitialData( pInitialData_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) + : flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineCacheCreateInfo ) ); + return *this; + } + + PipelineCacheCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialDataSize_; + return *this; + } + + PipelineCacheCreateInfo & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT + { + pInitialData = pInitialData_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialData_.size() * sizeof(T); + pInitialData = initialData_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkPipelineCacheCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineCacheCreateInfo const& ) const = default; +#else + bool operator==( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( initialDataSize == rhs.initialDataSize ) + && ( pInitialData == rhs.pInitialData ); + } + + bool operator!=( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; 
+    size_t initialDataSize = {};
+    const void* pInitialData = {};
+
+  };
+  static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCacheCreateInfo>
+  {
+    using Type = PipelineCacheCreateInfo;
+  };
+
+  struct PushConstantRange
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PushConstantRange(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {}) VULKAN_HPP_NOEXCEPT
+      : stageFlags( stageFlags_ ), offset( offset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>( &rhs );
+      return *this;
+    }
+
+    PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( PushConstantRange ) );
+      return *this;
+    }
+
+    PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+
+    operator VkPushConstantRange const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPushConstantRange*>( this );
+    }
+
+    operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPushConstantRange*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PushConstantRange const& ) const = default;
+#else
+    bool operator==( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( stageFlags == rhs.stageFlags )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+    uint32_t offset = {};
+    uint32_t size = {};
+
+  };
+  static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PushConstantRange>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineLayoutCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo(VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, uint32_t setLayoutCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}, uint32_t pushConstantRangeCount_ = {}, const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ = {}) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ ), setLayoutCount( setLayoutCount_ ), pSetLayouts( pSetLayouts_ ), pushConstantRangeCount( pushConstantRangeCount_ ), pPushConstantRanges( pPushConstantRanges_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {} )
+      : flags( flags_ ), setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ), pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) ), pPushConstantRanges( pushConstantRanges_.data() )
+    {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( PipelineLayoutCreateInfo ) );
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setLayoutCount = setLayoutCount_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineLayoutCreateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
+      pSetLayouts = setLayouts_.data();
+      return *this;
+    }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantRangeCount = pushConstantRangeCount_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPushConstantRanges = pPushConstantRanges_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineLayoutCreateInfo & setPushConstantRanges( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
+      pPushConstantRanges = pushConstantRanges_.data();
+      return *this;
+    }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>( this );
+    }
+
+    operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineLayoutCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineLayoutCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( setLayoutCount == rhs.setLayoutCount )
+          && ( pSetLayouts == rhs.pSetLayouts )
+          && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
+          && ( pPushConstantRanges == rhs.pPushConstantRanges );
+    }
+
+    bool operator!=( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {};
+    uint32_t setLayoutCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
+    uint32_t pushConstantRangeCount = {};
+    const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges = {};
+
+  };
+  static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineLayoutCreateInfo>
+  {
+    using Type = PipelineLayoutCreateInfo;
+  };
+
+  struct PrivateDataSlotCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfoEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfoEXT( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PrivateDataSlotCreateInfoEXT( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PrivateDataSlotCreateInfoEXT & operator=( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    PrivateDataSlotCreateInfoEXT & operator=( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( PrivateDataSlotCreateInfoEXT ) );
+      return *this;
+    }
+
+    PrivateDataSlotCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PrivateDataSlotCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+
+    operator VkPrivateDataSlotCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT*>( this );
+    }
+
+    operator VkPrivateDataSlotCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPrivateDataSlotCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto
operator<=>( PrivateDataSlotCreateInfoEXT const& ) const = default; +#else + bool operator==( PrivateDataSlotCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( PrivateDataSlotCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags = {}; + + }; + static_assert( sizeof( PrivateDataSlotCreateInfoEXT ) == sizeof( VkPrivateDataSlotCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PrivateDataSlotCreateInfoEXT; + }; + + class PrivateDataSlotEXT + { + public: + using CType = VkPrivateDataSlotEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlotEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT() VULKAN_HPP_NOEXCEPT + : m_privateDataSlotEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_privateDataSlotEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlotEXT( VkPrivateDataSlotEXT privateDataSlotEXT ) VULKAN_HPP_NOEXCEPT + : m_privateDataSlotEXT( privateDataSlotEXT ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + PrivateDataSlotEXT & operator=(VkPrivateDataSlotEXT privateDataSlotEXT) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlotEXT = privateDataSlotEXT; + return *this; + } +#endif + + PrivateDataSlotEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlotEXT = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PrivateDataSlotEXT const& ) const = default; +#else + bool operator==( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT == rhs.m_privateDataSlotEXT; + } + + bool operator!=(PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT != rhs.m_privateDataSlotEXT; + } + + bool operator<(PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT < rhs.m_privateDataSlotEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlotEXT() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT == VK_NULL_HANDLE; + } + + private: + VkPrivateDataSlotEXT m_privateDataSlotEXT; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT ) == sizeof( VkPrivateDataSlotEXT ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT; + }; + + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct QueryPoolCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo(VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, uint32_t queryCount_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), queryType( queryType_ ), queryCount( queryCount_ ), pipelineStatistics( pipelineStatistics_ ) + {} + + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( QueryPoolCreateInfo ) ); + return *this; + } + + QueryPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT + { + queryType = queryType_; + return *this; + } + + QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT + { + queryCount = queryCount_; + return *this; + } + + QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatistics = pipelineStatistics_; + return *this; + } + + + operator VkQueryPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( QueryPoolCreateInfo const& ) const = default; +#else + bool operator==( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( queryType == rhs.queryType ) + && ( queryCount == rhs.queryCount ) + && ( pipelineStatistics == rhs.pipelineStatistics ); + } + + bool operator!=( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; + uint32_t queryCount = {}; + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; + + }; + static_assert( sizeof( 
QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = QueryPoolCreateInfo; + }; + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct RayTracingShaderGroupCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}, const void* pShaderGroupCaptureReplayHandle_ = {}) VULKAN_HPP_NOEXCEPT + : type( type_ ), generalShader( generalShader_ ), closestHitShader( closestHitShader_ ), anyHitShader( anyHitShader_ ), intersectionShader( intersectionShader_ ), pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RayTracingShaderGroupCreateInfoKHR ) ); + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setPShaderGroupCaptureReplayHandle( const void* pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT + { + pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_; + return *this; + } + + + operator VkRayTracingShaderGroupCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RayTracingShaderGroupCreateInfoKHR const& ) const = default; +#else + 
bool operator==( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( type == rhs.type ) + && ( generalShader == rhs.generalShader ) + && ( closestHitShader == rhs.closestHitShader ) + && ( anyHitShader == rhs.anyHitShader ) + && ( intersectionShader == rhs.intersectionShader ) + && ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle ); + } + + bool operator!=( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = {}; + uint32_t closestHitShader = {}; + uint32_t anyHitShader = {}; + uint32_t intersectionShader = {}; + const void* pShaderGroupCaptureReplayHandle = {}; + + }; + static_assert( sizeof( RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RayTracingShaderGroupCreateInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct PipelineLibraryCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR(uint32_t libraryCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ = {}) VULKAN_HPP_NOEXCEPT + : libraryCount( libraryCount_ ), pLibraries( pLibraries_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) + : libraryCount( static_cast( libraries_.size() ) ), pLibraries( libraries_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineLibraryCreateInfoKHR ) ); + return *this; + } + + PipelineLibraryCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT + { + libraryCount = libraryCount_; + return *this; + } + + PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ ) VULKAN_HPP_NOEXCEPT + { + pLibraries = pLibraries_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineLibraryCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ 
) VULKAN_HPP_NOEXCEPT + { + libraryCount = static_cast( libraries_.size() ); + pLibraries = libraries_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkPipelineLibraryCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineLibraryCreateInfoKHR const& ) const = default; +#else + bool operator==( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( libraryCount == rhs.libraryCount ) + && ( pLibraries == rhs.pLibraries ); + } + + bool operator!=( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; + const void* pNext = {}; + uint32_t libraryCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries = {}; + + }; + static_assert( sizeof( PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineLibraryCreateInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct RayTracingPipelineInterfaceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR(uint32_t maxPayloadSize_ = {}, uint32_t maxAttributeSize_ = {}, uint32_t maxCallableSize_ = {}) VULKAN_HPP_NOEXCEPT + : maxPayloadSize( maxPayloadSize_ ), maxAttributeSize( maxAttributeSize_ ), maxCallableSize( maxCallableSize_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) ); + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & setMaxPayloadSize( uint32_t maxPayloadSize_ ) VULKAN_HPP_NOEXCEPT + { + maxPayloadSize = maxPayloadSize_; + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & setMaxAttributeSize( uint32_t maxAttributeSize_ ) VULKAN_HPP_NOEXCEPT + { + maxAttributeSize = maxAttributeSize_; + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & setMaxCallableSize( uint32_t maxCallableSize_ ) VULKAN_HPP_NOEXCEPT + { + 
maxCallableSize = maxCallableSize_; + return *this; + } + + + operator VkRayTracingPipelineInterfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const& ) const = default; +#else + bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxPayloadSize == rhs.maxPayloadSize ) + && ( maxAttributeSize == rhs.maxAttributeSize ) + && ( maxCallableSize == rhs.maxCallableSize ); + } + + bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + const void* pNext = {}; + uint32_t maxPayloadSize = {}; + uint32_t maxAttributeSize = {}; + uint32_t maxCallableSize = {}; + + }; + static_assert( sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RayTracingPipelineInterfaceCreateInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct RayTracingPipelineCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), maxRecursionDepth( maxRecursionDepth_ ), libraries( libraries_ ), pLibraryInterface( pLibraryInterface_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries_ = {}, const 
VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} ) + : flags( flags_ ), stageCount( static_cast( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast( groups_.size() ) ), pGroups( groups_.data() ), maxRecursionDepth( maxRecursionDepth_ ), libraries( libraries_ ), pLibraryInterface( pLibraryInterface_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RayTracingPipelineCreateInfoKHR ) ); + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RayTracingPipelineCreateInfoKHR & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RayTracingPipelineCreateInfoKHR & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + RayTracingPipelineCreateInfoKHR & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxRecursionDepth = maxRecursionDepth_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & libraries_ ) VULKAN_HPP_NOEXCEPT + { + libraries = libraries_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInterface = pLibraryInterface_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + RayTracingPipelineCreateInfoKHR 
& setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + + operator VkRayTracingPipelineCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RayTracingPipelineCreateInfoKHR const& ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( stageCount == rhs.stageCount ) + && ( pStages == rhs.pStages ) + && ( groupCount == rhs.groupCount ) + && ( pGroups == rhs.pGroups ) + && ( maxRecursionDepth == rhs.maxRecursionDepth ) + && ( libraries == rhs.libraries ) + && ( pLibraryInterface == rhs.pLibraryInterface ) + && ( layout == rhs.layout ) + && ( basePipelineHandle == rhs.basePipelineHandle ) + && ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups = {}; + uint32_t maxRecursionDepth = {}; + VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries = {}; + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + + }; + static_assert( sizeof( RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = RayTracingPipelineCreateInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct RayTracingShaderGroupCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}) VULKAN_HPP_NOEXCEPT + : type( type_ ), generalShader( generalShader_ ), closestHitShader( closestHitShader_ ), anyHitShader( anyHitShader_ ), intersectionShader( intersectionShader_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RayTracingShaderGroupCreateInfoNV ) ); + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } + + + operator VkRayTracingShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RayTracingShaderGroupCreateInfoNV const& ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( type == rhs.type ) + && ( generalShader == rhs.generalShader ) + && ( closestHitShader == rhs.closestHitShader ) + && ( anyHitShader == rhs.anyHitShader ) + && ( intersectionShader == rhs.intersectionShader ); + } + + bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = {}; + uint32_t closestHitShader = {}; + uint32_t anyHitShader = {}; + uint32_t intersectionShader = {}; + + }; + static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RayTracingShaderGroupCreateInfoNV; + }; + + struct RayTracingPipelineCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} ) + : flags( flags_ ), stageCount( static_cast( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast( groups_.size() ) ), pGroups( groups_.data() ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RayTracingPipelineCreateInfoNV ) ); + return *this; + } + + RayTracingPipelineCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + 
return *this; + } + + RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RayTracingPipelineCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RayTracingPipelineCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxRecursionDepth = maxRecursionDepth_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + + operator VkRayTracingPipelineCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RayTracingPipelineCreateInfoNV const& ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( stageCount == rhs.stageCount ) + && ( pStages == rhs.pStages ) + && ( groupCount == rhs.groupCount ) + && ( pGroups == rhs.pGroups ) + && ( maxRecursionDepth == rhs.maxRecursionDepth ) + && ( layout == rhs.layout ) + && ( basePipelineHandle == rhs.basePipelineHandle ) + && ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups = {}; + uint32_t maxRecursionDepth = {}; + 
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + + }; + static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RayTracingPipelineCreateInfoNV; + }; + + struct SubpassDescription + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t* pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( inputAttachmentCount_ ), pInputAttachments( pInputAttachments_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pResolveAttachments( pResolveAttachments_ ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( preserveAttachmentCount_ ), pPreserveAttachments( pPreserveAttachments_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {} ) + : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( static_cast( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +#else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +#endif // 
!defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SubpassDescription ) ); + return *this; + } + + SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachments = pInputAttachments_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubpassDescription & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubpassDescription & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubpassDescription & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + SubpassDescription & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + 
SubpassDescription & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkSubpassDescription const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSubpassDescription*>( this ); + } + + operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkSubpassDescription*>( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubpassDescription const& ) const = default; +#else + bool operator==( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( inputAttachmentCount == rhs.inputAttachmentCount ) + && ( pInputAttachments == rhs.pInputAttachments ) + && ( colorAttachmentCount == rhs.colorAttachmentCount ) + && ( pColorAttachments == rhs.pColorAttachments ) + && ( pResolveAttachments == rhs.pResolveAttachments ) + && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) + && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) + && ( pPreserveAttachments == rhs.pPreserveAttachments ); + } + + bool operator!=( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t* pPreserveAttachments = {}; + + }; + static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<SubpassDescription>::value, "struct wrapper is not a standard layout!" 
); + + struct SubpassDependency + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDependency(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}) VULKAN_HPP_NOEXCEPT + : srcSubpass( srcSubpass_ ), dstSubpass( dstSubpass_ ), srcStageMask( srcStageMask_ ), dstStageMask( dstStageMask_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), dependencyFlags( dependencyFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SubpassDependency ) ); + return *this; + } + + SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + { + srcSubpass = srcSubpass_; + return *this; + } + + SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + { + dstSubpass = dstSubpass_; + return *this; + } + + SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } + + + operator VkSubpassDependency const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubpassDependency const& ) const = default; +#else + bool operator==( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( srcSubpass == rhs.srcSubpass ) + && ( dstSubpass == rhs.dstSubpass ) + && ( srcStageMask == rhs.srcStageMask ) + && ( dstStageMask == rhs.dstStageMask ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ) + && ( dependencyFlags == rhs.dependencyFlags ); + } + + bool operator!=( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = 
{}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + + }; + static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct RenderPassCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), subpassCount( subpassCount_ ), pSubpasses( pSubpasses_ ), dependencyCount( dependencyCount_ ), pDependencies( pDependencies_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ = {} ) + : flags( flags_ ), attachmentCount( static_cast( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast( dependencies_.size() ) ), pDependencies( dependencies_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RenderPassCreateInfo ) ); + return *this; + } + + RenderPassCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + RenderPassCreateInfo & setSubpassCount( 
uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ ) VULKAN_HPP_NOEXCEPT + { + pSubpasses = pSubpasses_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassCreateInfo & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( subpasses_.size() ); + pSubpasses = subpasses_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ ) VULKAN_HPP_NOEXCEPT + { + pDependencies = pDependencies_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassCreateInfo & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( dependencies_.size() ); + pDependencies = dependencies_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkRenderPassCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RenderPassCreateInfo const& ) const = default; +#else + bool operator==( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( attachmentCount == rhs.attachmentCount ) + && ( pAttachments == rhs.pAttachments ) + && ( subpassCount == rhs.subpassCount ) + && ( pSubpasses == rhs.pSubpasses ) + && ( dependencyCount == rhs.dependencyCount ) + && ( pDependencies == rhs.pDependencies ); + } + + bool operator!=( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies = {}; + + }; + static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = RenderPassCreateInfo; + }; + + struct SubpassDescription2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription2(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t viewMask_ = {}, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t* pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( inputAttachmentCount_ ), pInputAttachments( pInputAttachments_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pResolveAttachments( pResolveAttachments_ ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( preserveAttachmentCount_ ), pPreserveAttachments( pPreserveAttachments_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {} ) + : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( static_cast( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +#else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SubpassDescription2 ) ); + return *this; + } + + SubpassDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + SubpassDescription2 & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachments = pInputAttachments_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubpassDescription2 & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + SubpassDescription2 & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubpassDescription2 & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + SubpassDescription2 & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubpassDescription2 & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + SubpassDescription2 & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + SubpassDescription2 & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pPreserveAttachments = 
pPreserveAttachments_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubpassDescription2 & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkSubpassDescription2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSubpassDescription2*>( this ); + } + + operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkSubpassDescription2*>( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubpassDescription2 const& ) const = default; +#else + bool operator==( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( viewMask == rhs.viewMask ) + && ( inputAttachmentCount == rhs.inputAttachmentCount ) + && ( pInputAttachments == rhs.pInputAttachments ) + && ( colorAttachmentCount == rhs.colorAttachmentCount ) + && ( pColorAttachments == rhs.pColorAttachments ) + && ( pResolveAttachments == rhs.pResolveAttachments ) + && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) + && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) + && ( pPreserveAttachments == rhs.pPreserveAttachments ); + } + + bool operator!=( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t viewMask = {}; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t* pPreserveAttachments = {}; + + }; + static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<SubpassDescription2>::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SubpassDescription2; + }; + using SubpassDescription2KHR = SubpassDescription2; + + struct SubpassDependency2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDependency2(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, int32_t viewOffset_ = {}) VULKAN_HPP_NOEXCEPT + : srcSubpass( srcSubpass_ ), dstSubpass( dstSubpass_ ), srcStageMask( srcStageMask_ ), dstStageMask( dstStageMask_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), dependencyFlags( dependencyFlags_ ), viewOffset( viewOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SubpassDependency2 ) ); + return *this; + } + + SubpassDependency2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + { + srcSubpass = srcSubpass_; + return *this; + } + + SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + { + dstSubpass = dstSubpass_; + return *this; + } + + SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } + + SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT + { + viewOffset = viewOffset_; + return *this; + } + + + operator VkSubpassDependency2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubpassDependency2 const& ) const = default; +#else + bool operator==( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( 
srcSubpass == rhs.srcSubpass ) + && ( dstSubpass == rhs.dstSubpass ) + && ( srcStageMask == rhs.srcStageMask ) + && ( dstStageMask == rhs.dstStageMask ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ) + && ( dependencyFlags == rhs.dependencyFlags ) + && ( viewOffset == rhs.viewOffset ); + } + + bool operator!=( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2; + const void* pNext = {}; + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + int32_t viewOffset = {}; + + }; + static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SubpassDependency2; + }; + using SubpassDependency2KHR = SubpassDependency2; + + struct RenderPassCreateInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ = {}, uint32_t correlatedViewMaskCount_ = {}, const uint32_t* pCorrelatedViewMasks_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), subpassCount( subpassCount_ ), pSubpasses( pSubpasses_ ), dependencyCount( dependencyCount_ ), pDependencies( pDependencies_ ), correlatedViewMaskCount( correlatedViewMaskCount_ ), pCorrelatedViewMasks( pCorrelatedViewMasks_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ = {} ) + : flags( flags_ ), attachmentCount( static_cast( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast( dependencies_.size() ) ), pDependencies( dependencies_.data() ), correlatedViewMaskCount( static_cast( correlatedViewMasks_.size() ) ), pCorrelatedViewMasks( correlatedViewMasks_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RenderPassCreateInfo2 ) ); + return *this; + } + + RenderPassCreateInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassCreateInfo2 & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ ) VULKAN_HPP_NOEXCEPT + { + pSubpasses = pSubpasses_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassCreateInfo2 & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( subpasses_.size() ); + pSubpasses = subpasses_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ ) VULKAN_HPP_NOEXCEPT + { + pDependencies = pDependencies_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassCreateInfo2 & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( dependencies_.size() ); + pDependencies = dependencies_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT + { + correlatedViewMaskCount = correlatedViewMaskCount_; + return *this; + } + + RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCorrelatedViewMasks = pCorrelatedViewMasks_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassCreateInfo2 & setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + correlatedViewMaskCount = static_cast( correlatedViewMasks_.size() ); + pCorrelatedViewMasks = correlatedViewMasks_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkRenderPassCreateInfo2 const&() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RenderPassCreateInfo2 const& ) const = default; +#else + bool operator==( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( attachmentCount == rhs.attachmentCount ) + && ( pAttachments == rhs.pAttachments ) + && ( subpassCount == rhs.subpassCount ) + && ( pSubpasses == rhs.pSubpasses ) + && ( dependencyCount == rhs.dependencyCount ) + && ( pDependencies == rhs.pDependencies ) + && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) + && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks ); + } + + bool operator!=( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies = {}; + uint32_t correlatedViewMaskCount = {}; + const uint32_t* pCorrelatedViewMasks = {}; + + }; + static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = RenderPassCreateInfo2; + }; + using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; + + struct SamplerCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerCreateInfo(VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, float mipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, float maxAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, float minLod_ = {}, float maxLod_ = {}, VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), magFilter( magFilter_ ), minFilter( minFilter_ ), mipmapMode( mipmapMode_ ), addressModeU( addressModeU_ ), addressModeV( addressModeV_ ), addressModeW( addressModeW_ ), mipLodBias( mipLodBias_ ), anisotropyEnable( anisotropyEnable_ ), maxAnisotropy( maxAnisotropy_ ), compareEnable( compareEnable_ ), compareOp( compareOp_ ), minLod( minLod_ ), maxLod( maxLod_ ), borderColor( borderColor_ ), unnormalizedCoordinates( unnormalizedCoordinates_ ) + {} + + VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SamplerCreateInfo ) ); + return *this; + } + + SamplerCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT + { + magFilter = magFilter_; + return *this; + } + + SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT + { + minFilter = minFilter_; + return *this; + } + + SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT + { + mipmapMode = mipmapMode_; + return *this; + } + + SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT + { + addressModeU = addressModeU_; + return *this; + } + + SamplerCreateInfo & setAddressModeV( 
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT + { + addressModeV = addressModeV_; + return *this; + } + + SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT + { + addressModeW = addressModeW_; + return *this; + } + + SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT + { + mipLodBias = mipLodBias_; + return *this; + } + + SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT + { + anisotropyEnable = anisotropyEnable_; + return *this; + } + + SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT + { + maxAnisotropy = maxAnisotropy_; + return *this; + } + + SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT + { + compareEnable = compareEnable_; + return *this; + } + + SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT + { + compareOp = compareOp_; + return *this; + } + + SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } + + SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT + { + maxLod = maxLod_; + return *this; + } + + SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT + { + borderColor = borderColor_; + return *this; + } + + SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT + { + unnormalizedCoordinates = unnormalizedCoordinates_; + return *this; + } + + + operator VkSamplerCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SamplerCreateInfo const& ) const = default; +#else + bool operator==( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( magFilter == rhs.magFilter ) + && ( minFilter == rhs.minFilter ) + && ( mipmapMode == rhs.mipmapMode ) + && ( addressModeU == rhs.addressModeU ) + && ( addressModeV == rhs.addressModeV ) + && ( addressModeW == rhs.addressModeW ) + && ( mipLodBias == rhs.mipLodBias ) + && ( anisotropyEnable == rhs.anisotropyEnable ) + && ( maxAnisotropy == rhs.maxAnisotropy ) + && ( compareEnable == rhs.compareEnable ) + && ( compareOp == rhs.compareOp ) + && ( minLod == rhs.minLod ) + && ( maxLod == rhs.maxLod ) + && ( borderColor == rhs.borderColor ) + && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); + } + + bool operator!=( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = 
VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + float mipLodBias = {}; + VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {}; + float maxAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {}; + VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + float minLod = {}; + float maxLod = {}; + VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack; + VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {}; + + }; + static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SamplerCreateInfo; + }; + + struct SamplerYcbcrConversionCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {}) VULKAN_HPP_NOEXCEPT + : format( format_ ), ycbcrModel( ycbcrModel_ ), ycbcrRange( ycbcrRange_ ), components( components_ ), xChromaOffset( xChromaOffset_ ), yChromaOffset( yChromaOffset_ ), chromaFilter( chromaFilter_ ), forceExplicitReconstruction( forceExplicitReconstruction_ ) + {} + + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SamplerYcbcrConversionCreateInfo ) ); + return *this; + } + + SamplerYcbcrConversionCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrModel = ycbcrModel_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange 
ycbcrRange_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrRange = ycbcrRange_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + { + components = components_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + xChromaOffset = xChromaOffset_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + yChromaOffset = yChromaOffset_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT + { + chromaFilter = chromaFilter_; + return *this; + } + + SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT + { + forceExplicitReconstruction = forceExplicitReconstruction_; + return *this; + } + + + operator VkSamplerYcbcrConversionCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SamplerYcbcrConversionCreateInfo const& ) const = default; +#else + bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( format == rhs.format ) + && ( ycbcrModel == rhs.ycbcrModel ) + && ( ycbcrRange == rhs.ycbcrRange ) + && ( components == rhs.components ) + && ( xChromaOffset == rhs.xChromaOffset ) + && ( yChromaOffset == rhs.yChromaOffset ) + && ( chromaFilter == rhs.chromaFilter ) + && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); + } + + bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {}; + + }; + static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SamplerYcbcrConversionCreateInfo; + }; + using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; + + class SamplerYcbcrConversion + { + public: + using CType = VkSamplerYcbcrConversion; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + + public: + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT + : m_samplerYcbcrConversion(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_samplerYcbcrConversion(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT + : m_samplerYcbcrConversion( samplerYcbcrConversion ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = samplerYcbcrConversion; + return *this; + } +#endif + + SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SamplerYcbcrConversion const& ) const = default; +#else + bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion; + } + + bool operator!=(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion; + } + + bool operator<(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion == VK_NULL_HANDLE; + } + + private: + VkSamplerYcbcrConversion m_samplerYcbcrConversion; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + using SamplerYcbcrConversionKHR = SamplerYcbcrConversion; + + struct SemaphoreCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SemaphoreCreateInfo ) ); + return *this; + } + + SemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + + operator VkSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SemaphoreCreateInfo const& ) const = default; +#else + bool operator==( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {}; + + }; + static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
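SemaphoreCreateInfo carries nothing but a (currently unused) flags field, so a default-constructed wrapper is normally all that is needed. A sketch assuming an existing vk::Device and the default exception-throwing, smart-handle configuration:

    #include <vulkan/vulkan.hpp>

    // Sketch: sType is pre-set by the wrapper; flags stay empty.
    vk::UniqueSemaphore makeSemaphore( vk::Device device )
    {
      return device.createSemaphoreUnique( vk::SemaphoreCreateInfo{} );
    }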
); + + template <> + struct CppType + { + using Type = SemaphoreCreateInfo; + }; + + struct ShaderModuleCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo(VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, size_t codeSize_ = {}, const uint32_t* pCode_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), codeSize( codeSize_ ), pCode( pCode_ ) + {} + + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) + : flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ShaderModuleCreateInfo ) ); + return *this; + } + + ShaderModuleCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = codeSize_; + return *this; + } + + ShaderModuleCreateInfo & setPCode( const uint32_t* pCode_ ) VULKAN_HPP_NOEXCEPT + { + pCode = pCode_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ShaderModuleCreateInfo & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = code_.size() * 4; + pCode = code_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkShaderModuleCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ShaderModuleCreateInfo const& ) const = default; +#else + bool operator==( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( codeSize == rhs.codeSize ) + && ( pCode == rhs.pCode ); + } + + bool operator!=( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {}; + size_t codeSize = {}; + const uint32_t* pCode = {}; + + }; + static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" 
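The enhanced-mode constructor above derives codeSize (in bytes) from a word-sized array proxy, which is why it multiplies the element count by 4. A sketch assuming SPIR-V words already loaded into a std::vector<uint32_t> and a valid vk::Device:

    #include <cstdint>
    #include <vector>
    #include <vulkan/vulkan.hpp>

    // Sketch: pass the SPIR-V words directly; codeSize is computed for us.
    vk::UniqueShaderModule makeModule( vk::Device device, std::vector<uint32_t> const & spirv )
    {
      vk::ShaderModuleCreateInfo info( {}, spirv );   // flags, code
      return device.createShaderModuleUnique( info );
    }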
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ShaderModuleCreateInfo; + }; + + class SurfaceKHR + { + public: + using CType = VkSurfaceKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; + + public: + VULKAN_HPP_CONSTEXPR SurfaceKHR() VULKAN_HPP_NOEXCEPT + : m_surfaceKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_surfaceKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT + : m_surfaceKHR( surfaceKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) VULKAN_HPP_NOEXCEPT + { + m_surfaceKHR = surfaceKHR; + return *this; + } +#endif + + SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_surfaceKHR = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SurfaceKHR const& ) const = default; +#else + bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR == rhs.m_surfaceKHR; + } + + bool operator!=(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR != rhs.m_surfaceKHR; + } + + bool operator<(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR < rhs.m_surfaceKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR == VK_NULL_HANDLE; + } + + private: + VkSurfaceKHR m_surfaceKHR; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::SurfaceKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct SwapchainCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, uint32_t minImageCount_ = {}, VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, uint32_t imageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), surface( surface_ ), minImageCount( minImageCount_ ), imageFormat( imageFormat_ ), imageColorSpace( imageColorSpace_ ), imageExtent( imageExtent_ ), imageArrayLayers( imageArrayLayers_ ), imageUsage( imageUsage_ ), imageSharingMode( imageSharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ), preTransform( preTransform_ ), compositeAlpha( compositeAlpha_ ), presentMode( presentMode_ ), clipped( clipped_ ), oldSwapchain( oldSwapchain_ ) + {} + + VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, uint32_t minImageCount_, VULKAN_HPP_NAMESPACE::Format imageFormat_, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, uint32_t imageArrayLayers_, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) 
+ : flags( flags_ ), surface( surface_ ), minImageCount( minImageCount_ ), imageFormat( imageFormat_ ), imageColorSpace( imageColorSpace_ ), imageExtent( imageExtent_ ), imageArrayLayers( imageArrayLayers_ ), imageUsage( imageUsage_ ), imageSharingMode( imageSharingMode_ ), queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), preTransform( preTransform_ ), compositeAlpha( compositeAlpha_ ), presentMode( presentMode_ ), clipped( clipped_ ), oldSwapchain( oldSwapchain_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SwapchainCreateInfoKHR ) ); + return *this; + } + + SwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } + + SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT + { + minImageCount = minImageCount_; + return *this; + } + + SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT + { + imageFormat = imageFormat_; + return *this; + } + + SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT + { + imageColorSpace = imageColorSpace_; + return *this; + } + + SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT + { + imageArrayLayers = imageArrayLayers_; + return *this; + } + + SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT + { + imageUsage = imageUsage_; + return *this; + } + + SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT + { + imageSharingMode = imageSharingMode_; + return *this; + } + + SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SwapchainCreateInfoKHR & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT + { + preTransform = 
preTransform_; + return *this; + } + + SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT + { + compositeAlpha = compositeAlpha_; + return *this; + } + + SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT + { + presentMode = presentMode_; + return *this; + } + + SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT + { + clipped = clipped_; + return *this; + } + + SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT + { + oldSwapchain = oldSwapchain_; + return *this; + } + + + operator VkSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SwapchainCreateInfoKHR const& ) const = default; +#else + bool operator==( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( surface == rhs.surface ) + && ( minImageCount == rhs.minImageCount ) + && ( imageFormat == rhs.imageFormat ) + && ( imageColorSpace == rhs.imageColorSpace ) + && ( imageExtent == rhs.imageExtent ) + && ( imageArrayLayers == rhs.imageArrayLayers ) + && ( imageUsage == rhs.imageUsage ) + && ( imageSharingMode == rhs.imageSharingMode ) + && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) + && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) + && ( preTransform == rhs.preTransform ) + && ( compositeAlpha == rhs.compositeAlpha ) + && ( presentMode == rhs.presentMode ) + && ( clipped == rhs.clipped ) + && ( oldSwapchain == rhs.oldSwapchain ); + } + + bool operator!=( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + uint32_t minImageCount = {}; + VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + uint32_t imageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t* pQueueFamilyIndices = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; + VULKAN_HPP_NAMESPACE::Bool32 clipped = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {}; + + }; + static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
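Most of the fields above come from earlier surface queries; only a handful are free choices. A sketch, assuming surface, extent and format were obtained from getSurfaceCapabilitiesKHR / getSurfaceFormatsKHR beforehand:

    #include <vulkan/vulkan.hpp>

    // Sketch: minimal exclusive-mode swapchain; FIFO is the always-available present mode.
    vk::SwapchainKHR makeSwapchain( vk::Device device, vk::SurfaceKHR surface,
                                    vk::Extent2D extent, vk::SurfaceFormatKHR format,
                                    vk::SwapchainKHR oldSwapchain = {} )
    {
      vk::SwapchainCreateInfoKHR info;
      info.setSurface( surface )
          .setMinImageCount( 3 )
          .setImageFormat( format.format )
          .setImageColorSpace( format.colorSpace )
          .setImageExtent( extent )
          .setImageArrayLayers( 1 )
          .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
          .setImageSharingMode( vk::SharingMode::eExclusive )
          .setPresentMode( vk::PresentModeKHR::eFifo )
          .setClipped( VK_TRUE )
          .setOldSwapchain( oldSwapchain );
      return device.createSwapchainKHR( info );
    }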
); + + template <> + struct CppType + { + using Type = SwapchainCreateInfoKHR; + }; + + struct ValidationCacheCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, size_t initialDataSize_ = {}, const void* pInitialData_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), initialDataSize( initialDataSize_ ), pInitialData( pInitialData_ ) + {} + + VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) + : flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ValidationCacheCreateInfoEXT ) ); + return *this; + } + + ValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialDataSize_; + return *this; + } + + ValidationCacheCreateInfoEXT & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT + { + pInitialData = pInitialData_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + ValidationCacheCreateInfoEXT & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialData_.size() * sizeof(T); + pInitialData = initialData_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ValidationCacheCreateInfoEXT const& ) const = default; +#else + bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( initialDataSize == rhs.initialDataSize ) + && ( pInitialData == rhs.pInitialData ); + } + + bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eValidationCacheCreateInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {}; + size_t initialDataSize = {}; + const void* pInitialData = {}; + + }; + static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ValidationCacheCreateInfoEXT; + }; + + class ValidationCacheEXT + { + public: + using CType = VkValidationCacheEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + + public: + VULKAN_HPP_CONSTEXPR ValidationCacheEXT() VULKAN_HPP_NOEXCEPT + : m_validationCacheEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_validationCacheEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT + : m_validationCacheEXT( validationCacheEXT ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = validationCacheEXT; + return *this; + } +#endif + + ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ValidationCacheEXT const& ) const = default; +#else + bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT == rhs.m_validationCacheEXT; + } + + bool operator!=(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT != rhs.m_validationCacheEXT; + } + + bool operator<(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT < rhs.m_validationCacheEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT == VK_NULL_HANDLE; + } + + private: + VkValidationCacheEXT m_validationCacheEXT; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct DisplayPowerInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT(VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff) VULKAN_HPP_NOEXCEPT + : powerState( powerState_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayPowerInfoEXT ) ); + return *this; + } + + DisplayPowerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT + { + powerState = powerState_; + return *this; + } + + + operator VkDisplayPowerInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayPowerInfoEXT const& ) const = default; +#else + bool operator==( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( powerState == rhs.powerState ); + } + + bool operator!=( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff; + + }; + static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
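DisplayPowerInfoEXT belongs to VK_EXT_display_control. A sketch, assuming that extension was enabled on the device and that `display` is a vk::DisplayKHR obtained from the physical device; the enhanced-mode wrapper call used here throws on failure:

    #include <vulkan/vulkan.hpp>

    // Sketch: put a display into the suspend power state.
    void suspendDisplay( vk::Device device, vk::DisplayKHR display )
    {
      vk::DisplayPowerInfoEXT powerInfo( vk::DisplayPowerStateEXT::eSuspend );
      device.displayPowerControlEXT( display, powerInfo );
    }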
); + + template <> + struct CppType + { + using Type = DisplayPowerInfoEXT; + }; + + struct MappedMemoryRange + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MappedMemoryRange(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT + : memory( memory_ ), offset( offset_ ), size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MappedMemoryRange ) ); + return *this; + } + + MappedMemoryRange & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + + operator VkMappedMemoryRange const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MappedMemoryRange const& ) const = default; +#else + bool operator==( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ) + && ( offset == rhs.offset ) + && ( size == rhs.size ); + } + + bool operator!=( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + + }; + static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
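MappedMemoryRange is what the flush/invalidate entry points take for non-coherent memory. A sketch, assuming a mapped allocation and offset/size values that already respect nonCoherentAtomSize:

    #include <vulkan/vulkan.hpp>

    // Sketch: make a host write visible to the device; a single range converts
    // implicitly to the ArrayProxy overload.
    void flushWrite( vk::Device device, vk::DeviceMemory memory,
                     vk::DeviceSize offset, vk::DeviceSize size )
    {
      vk::MappedMemoryRange range( memory, offset, size );
      device.flushMappedMemoryRanges( range );
    }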
 );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMappedMemoryRange>
+  {
+    using Type = MappedMemoryRange;
+  };
+
+  struct MemoryRequirements
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryRequirements(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+      : size( size_ ), alignment( alignment_ ), memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
+      return *this;
+    }
+
+    MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryRequirements ) );
+      return *this;
+    }
+
+
+    operator VkMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements*>( this );
+    }
+
+    operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryRequirements const& ) const = default;
+#else
+    bool operator==( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( size == rhs.size )
+          && ( alignment == rhs.alignment )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
+    uint32_t memoryTypeBits = {};
+
+  };
+  static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
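memoryTypeBits is a mask over the indices reported by the physical device, which is how allocations are matched to memory types. A sketch of the usual lookup, assuming `props` came from getMemoryProperties():

    #include <cstdint>
    #include <stdexcept>
    #include <vulkan/vulkan.hpp>

    // Sketch: first memory type allowed by the requirements that also has the
    // requested property flags.
    uint32_t findMemoryType( vk::PhysicalDeviceMemoryProperties const & props,
                             vk::MemoryRequirements const & reqs,
                             vk::MemoryPropertyFlags wanted )
    {
      for ( uint32_t i = 0; i < props.memoryTypeCount; ++i )
      {
        if ( ( reqs.memoryTypeBits & ( 1u << i ) ) &&
             ( ( props.memoryTypes[i].propertyFlags & wanted ) == wanted ) )
        {
          return i;
        }
      }
      throw std::runtime_error( "no suitable memory type" );
    }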
+
+  struct MemoryRequirements2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2(VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}) VULKAN_HPP_NOEXCEPT
+      : memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
+      return *this;
+    }
+
+    MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryRequirements2 ) );
+      return *this;
+    }
+
+
+    operator VkMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements2*>( this );
+    }
+
+    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryRequirements2 const& ) const = default;
+#else
+    bool operator==( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryRequirements == rhs.memoryRequirements );
+    }
+
+    bool operator!=( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+
+  };
+  static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryRequirements2>::value, "struct wrapper is not a standard layout!"
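MemoryRequirements2 is the pNext-extensible variant introduced with Vulkan 1.1; the KHR alias defined just below keeps extension-era code compiling unchanged. A sketch of the corresponding query, assuming a valid device and buffer:

    #include <vulkan/vulkan.hpp>

    // Sketch: the "2" query wraps the plain requirements in an sType'd struct
    // so that e.g. MemoryDedicatedRequirements can be chained on pNext.
    vk::MemoryRequirements queryRequirements2( vk::Device device, vk::Buffer buffer )
    {
      vk::BufferMemoryRequirementsInfo2 info( buffer );
      vk::MemoryRequirements2 reqs = device.getBufferMemoryRequirements2( info );
      return reqs.memoryRequirements;
    }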
); + + template <> + struct CppType + { + using Type = MemoryRequirements2; + }; + using MemoryRequirements2KHR = MemoryRequirements2; + + struct DeviceGroupPresentCapabilitiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR(std::array const& presentMask_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}) VULKAN_HPP_NOEXCEPT + : presentMask( presentMask_ ), modes( modes_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceGroupPresentCapabilitiesKHR ) ); + return *this; + } + + + operator VkDeviceGroupPresentCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGroupPresentCapabilitiesKHR const& ) const = default; +#else + bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( presentMask == rhs.presentMask ) + && ( modes == rhs.modes ); + } + + bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D presentMask = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; + + }; + static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
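DeviceGroupPresentCapabilitiesKHR is an output structure filled in by the device-group present query. A sketch, assuming a device created with the swapchain extension (and, on Vulkan 1.0, VK_KHR_device_group); the accessor used here is the enhanced-mode wrapper and is an assumption of this sketch rather than something shown in the hunk above:

    #include <vulkan/vulkan.hpp>

    // Sketch: check whether purely local presentation is supported.
    bool supportsLocalPresent( vk::Device device )
    {
      vk::DeviceGroupPresentCapabilitiesKHR caps = device.getGroupPresentCapabilitiesKHR();
      return static_cast<bool>( caps.modes & vk::DeviceGroupPresentModeFlagBitsKHR::eLocal );
    }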
); + + template <> + struct CppType + { + using Type = DeviceGroupPresentCapabilitiesKHR; + }; + + struct PhysicalDeviceSurfaceInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}) VULKAN_HPP_NOEXCEPT + : surface( surface_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) ); + return *this; + } + + PhysicalDeviceSurfaceInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } + + + operator VkPhysicalDeviceSurfaceInfo2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const& ) const = default; +#else + bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( surface == rhs.surface ); + } + + bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + + }; + static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
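PhysicalDeviceSurfaceInfo2KHR exists so that the surface-capability query has a pNext chain on its input side. A sketch, assuming the instance enabled VK_KHR_get_surface_capabilities2:

    #include <vulkan/vulkan.hpp>

    // Sketch: the "2" capability query takes the surface wrapped in the info struct.
    vk::SurfaceCapabilitiesKHR queryCaps2( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
    {
      vk::PhysicalDeviceSurfaceInfo2KHR info( surface );
      vk::SurfaceCapabilities2KHR caps = physicalDevice.getSurfaceCapabilities2KHR( info );
      return caps.surfaceCapabilities;
    }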
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSurfaceInfo2KHR; + }; + + struct DeviceMemoryOpaqueCaptureAddressInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) ); + return *this; + } + + DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + + operator VkDeviceMemoryOpaqueCaptureAddressInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const& ) const = default; +#else + bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ); + } + + bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + + }; + static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
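DeviceMemoryOpaqueCaptureAddressInfo feeds the Vulkan 1.2 opaque-capture-address query used for buffer-device-address capture/replay. A short sketch; the required feature (bufferDeviceAddressCaptureReplay) and the accessor used here are assumptions, not something this hunk shows:

    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    // Sketch: retrieve the opaque capture address of an allocation.
    uint64_t captureAddress( vk::Device device, vk::DeviceMemory memory )
    {
      return device.getMemoryOpaqueCaptureAddress( vk::DeviceMemoryOpaqueCaptureAddressInfo( memory ) );
    }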
); + + template <> + struct CppType + { + using Type = DeviceMemoryOpaqueCaptureAddressInfo; + }; + using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; + + struct PresentInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentInfoKHR(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ = {}, const uint32_t* pImageIndices_ = {}, VULKAN_HPP_NAMESPACE::Result* pResults_ = {}) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), swapchainCount( swapchainCount_ ), pSwapchains( pSwapchains_ ), pImageIndices( pImageIndices_ ), pResults( pResults_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), swapchainCount( static_cast( swapchains_.size() ) ), pSwapchains( swapchains_.data() ), pImageIndices( imageIndices_.data() ), pResults( results_.data() ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() ); + VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) ); + VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) ); +#else + if ( swapchains_.size() != imageIndices_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" ); + } + if ( !results_.empty() && ( swapchains_.size() != results_.size() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" ); + } + if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PresentInfoKHR ) ); + return *this; + } + + PresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) 
VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PresentInfoKHR & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ ) VULKAN_HPP_NOEXCEPT + { + pSwapchains = pSwapchains_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PresentInfoKHR & setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( swapchains_.size() ); + pSwapchains = swapchains_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + PresentInfoKHR & setPImageIndices( const uint32_t* pImageIndices_ ) VULKAN_HPP_NOEXCEPT + { + pImageIndices = pImageIndices_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( imageIndices_.size() ); + pImageIndices = imageIndices_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result* pResults_ ) VULKAN_HPP_NOEXCEPT + { + pResults = pResults_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( results_.size() ); + pResults = results_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PresentInfoKHR const& ) const = default; +#else + bool operator==( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) + && ( pWaitSemaphores == rhs.pWaitSemaphores ) + && ( swapchainCount == rhs.swapchainCount ) + && ( pSwapchains == rhs.pSwapchains ) + && ( pImageIndices == rhs.pImageIndices ) + && ( pResults == rhs.pResults ); + } + + bool operator!=( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR; + const void* pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains = {}; + const uint32_t* pImageIndices = {}; + VULKAN_HPP_NAMESPACE::Result* pResults = {}; + + }; + static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
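The enhanced-mode constructor above enforces that the swapchain and image-index arrays have the same length, which is the invariant vkQueuePresentKHR requires. A sketch of presenting a single image, assuming the handles already exist; presentKHR is declared further down to return a Result, so eSuboptimalKHR can be observed by the caller:

    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    // Sketch: one swapchain, one image index, one wait semaphore.
    vk::Result present( vk::Queue queue, vk::Semaphore renderFinished,
                        vk::SwapchainKHR swapchain, uint32_t imageIndex )
    {
      vk::PresentInfoKHR presentInfo;
      presentInfo.setWaitSemaphores( renderFinished )
                 .setSwapchains( swapchain )
                 .setImageIndices( imageIndex );
      return queue.presentKHR( presentInfo );
    }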
); + + template <> + struct CppType + { + using Type = PresentInfoKHR; + }; + + struct SubmitInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubmitInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ = {}, uint32_t commandBufferCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {}) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), pWaitDstStageMask( pWaitDstStageMask_ ), commandBufferCount( commandBufferCount_ ), pCommandBuffers( pCommandBuffers_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphores( pSignalSemaphores_ ) + {} + + VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitDstStageMask_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), pWaitDstStageMask( waitDstStageMask_.data() ), commandBufferCount( static_cast( commandBuffers_.size() ) ), pCommandBuffers( commandBuffers_.data() ), signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() ); +#else + if ( waitSemaphores_.size() != waitDstStageMask_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SubmitInfo ) ); + return *this; + } + + SubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubmitInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + SubmitInfo & setPWaitDstStageMask( const 
VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + pWaitDstStageMask = pWaitDstStageMask_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitDstStageMask_.size() ); + pWaitDstStageMask = waitDstStageMask_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } + + SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBuffers = pCommandBuffers_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubmitInfo & setCommandBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = static_cast( commandBuffers_.size() ); + pCommandBuffers = commandBuffers_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SubmitInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkSubmitInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubmitInfo const& ) const = default; +#else + bool operator==( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) + && ( pWaitSemaphores == rhs.pWaitSemaphores ) + && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) + && ( commandBufferCount == rhs.commandBufferCount ) + && ( pCommandBuffers == rhs.pCommandBuffers ) + && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) + && ( pSignalSemaphores == rhs.pSignalSemaphores ); + } + + bool operator!=( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; + const void* pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {}; + const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask = {}; + uint32_t commandBufferCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {}; + + }; + static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" 
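The wait-semaphore / wait-stage size check implemented in the constructor above mirrors the validation rule for vkQueueSubmit. A sketch of a typical per-frame submission, assuming the handles were created elsewhere:

    #include <vulkan/vulkan.hpp>

    // Sketch: wait for the acquired image at the color-attachment stage, run one
    // command buffer, signal the render-finished semaphore, fence the submission.
    void submitFrame( vk::Queue queue, vk::CommandBuffer cmd,
                      vk::Semaphore imageAvailable, vk::Semaphore renderFinished,
                      vk::Fence inFlight )
    {
      vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
      vk::SubmitInfo submitInfo( imageAvailable, waitStage, cmd, renderFinished );
      queue.submit( submitInfo, inFlight );
    }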
+  static_assert( std::is_standard_layout<SubmitInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubmitInfo>
+  {
+    using Type = SubmitInfo;
+  };
+
+  class Queue
+  {
+  public:
+    using CType = VkQueue;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Queue() VULKAN_HPP_NOEXCEPT
+      : m_queue(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_queue(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT
+      : m_queue( queue )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Queue & operator=(VkQueue queue) VULKAN_HPP_NOEXCEPT
+    {
+      m_queue = queue;
+      return *this;
+    }
+#endif
+
+    Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_queue = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Queue const& ) const = default;
+#else
+    bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue == rhs.m_queue;
+    }
+
+    bool operator!=(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue != rhs.m_queue;
+    }
+
+    bool operator<(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue < rhs.m_queue;
+    }
+#endif
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type = 0>
+    std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const &bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const &submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkQueue m_queue;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eQueue>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Queue;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueue>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Queue;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Queue;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Queue>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct DeviceQueueInfo2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueIndex( queueIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>( &rhs );
+      return *this;
+    }
+
+    DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( DeviceQueueInfo2 ) );
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueIndex = queueIndex_;
+      return *this;
+    }
+
+
+    operator VkDeviceQueueInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueInfo2*>( this );
+    }
+
+    operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueInfo2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceQueueInfo2 const& ) const = default;
+#else
+    bool operator==( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( queueIndex == rhs.queueIndex );
+    }
+
+    bool operator!=( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
+    uint32_t queueIndex = {};
+
+  };
+  static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = DeviceQueueInfo2; + }; + + struct FenceGetFdInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT + : fence( fence_ ), handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( FenceGetFdInfoKHR ) ); + return *this; + } + + FenceGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + + operator VkFenceGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( FenceGetFdInfoKHR const& ) const = default; +#else + bool operator==( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fence == rhs.fence ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + + }; + static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = FenceGetFdInfoKHR; + }; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct FenceGetWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT + : fence( fence_ ), handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( FenceGetWin32HandleInfoKHR ) ); + return *this; + } + + FenceGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + + operator VkFenceGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( FenceGetWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fence == rhs.fence ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + + }; + static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = FenceGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct GeneratedCommandsMemoryRequirementsInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t maxSequencesCount_ = {}) VULKAN_HPP_NOEXCEPT + : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), maxSequencesCount( maxSequencesCount_ ) + {} + + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) ); + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSequencesCount = maxSequencesCount_; + return *this; + } + + + operator VkGeneratedCommandsMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const& ) const = default; +#else + bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( pipeline == rhs.pipeline ) + && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) + && ( maxSequencesCount == rhs.maxSequencesCount ); + } + + bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) 
const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t maxSequencesCount = {}; + + }; + static_assert( sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = GeneratedCommandsMemoryRequirementsInfoNV; + }; + + struct ImageDrmFormatModifierPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageDrmFormatModifierPropertiesEXT ) ); + return *this; + } + + + operator VkImageDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageDrmFormatModifierPropertiesEXT const& ) const = default; +#else + bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( drmFormatModifier == rhs.drmFormatModifier ); + } + + bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; + void* pNext = {}; + uint64_t drmFormatModifier = {}; + + }; + static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierPropertiesEXT; + }; + + struct ImageMemoryRequirementsInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}) VULKAN_HPP_NOEXCEPT + : image( image_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageMemoryRequirementsInfo2 ) ); + return *this; + } + + ImageMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + + operator VkImageMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageMemoryRequirementsInfo2 const& ) const = default; +#else + bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ); + } + + bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + + }; + static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageMemoryRequirementsInfo2; + }; + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; + + struct SparseImageFormatProperties + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {}) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ), imageGranularity( imageGranularity_ ), flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SparseImageFormatProperties ) ); + return *this; + } + + + operator VkSparseImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageFormatProperties const& ) const = default; +#else + bool operator==( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspectMask == rhs.aspectMask ) + && ( imageGranularity == rhs.imageGranularity ) + && ( flags == rhs.flags ); + } + + bool operator!=( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; + + }; + static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct SparseImageMemoryRequirements + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, uint32_t imageMipTailFirstLod_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {}) VULKAN_HPP_NOEXCEPT + : formatProperties( formatProperties_ ), imageMipTailFirstLod( imageMipTailFirstLod_ ), imageMipTailSize( imageMipTailSize_ ), imageMipTailOffset( imageMipTailOffset_ ), imageMipTailStride( imageMipTailStride_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SparseImageMemoryRequirements ) ); + return *this; + } + + + operator VkSparseImageMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageMemoryRequirements const& ) const = default; +#else + bool operator==( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( formatProperties == rhs.formatProperties ) + && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) + && ( imageMipTailSize == rhs.imageMipTailSize ) + && ( imageMipTailOffset == rhs.imageMipTailOffset ) + && ( imageMipTailStride == rhs.imageMipTailStride ); + } + + bool operator!=( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {}; + uint32_t imageMipTailFirstLod = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {}; + + }; + static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ImageSparseMemoryRequirementsInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}) VULKAN_HPP_NOEXCEPT + : image( image_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageSparseMemoryRequirementsInfo2 ) ); + return *this; + } + + ImageSparseMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + + operator VkImageSparseMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageSparseMemoryRequirementsInfo2 const& ) const = default; +#else + bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ); + } + + bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + + }; + static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageSparseMemoryRequirementsInfo2; + }; + using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; + + struct SparseImageMemoryRequirements2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2(VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {}) VULKAN_HPP_NOEXCEPT + : memoryRequirements( memoryRequirements_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SparseImageMemoryRequirements2 ) ); + return *this; + } + + + operator VkSparseImageMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageMemoryRequirements2 const& ) const = default; +#else + bool operator==( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryRequirements == rhs.memoryRequirements ); + } + + bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {}; + + }; + static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SparseImageMemoryRequirements2; + }; + using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; + + struct SubresourceLayout + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceLayout(VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {}) VULKAN_HPP_NOEXCEPT + : offset( offset_ ), size( size_ ), rowPitch( rowPitch_ ), arrayPitch( arrayPitch_ ), depthPitch( depthPitch_ ) + {} + + VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SubresourceLayout ) ); + return *this; + } + + + operator VkSubresourceLayout const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubresourceLayout const& ) const = default; +#else + bool operator==( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( offset == rhs.offset ) + && ( size == rhs.size ) + && ( rowPitch == rhs.rowPitch ) + && ( arrayPitch == rhs.arrayPitch ) + && ( depthPitch == rhs.depthPitch ); + } + + bool operator!=( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {}; + + }; + static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ImageViewAddressPropertiesNVX + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT + : deviceAddress( deviceAddress_ ), size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageViewAddressPropertiesNVX ) ); + return *this; + } + + + operator VkImageViewAddressPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageViewAddressPropertiesNVX const& ) const = default; +#else + bool operator==( ImageViewAddressPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceAddress == rhs.deviceAddress ) + && ( size == rhs.size ); + } + + bool operator!=( ImageViewAddressPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + + }; + static_assert( sizeof( ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageViewAddressPropertiesNVX; + }; + + struct ImageViewHandleInfoNVX + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}) VULKAN_HPP_NOEXCEPT + : imageView( imageView_ ), descriptorType( descriptorType_ ), sampler( sampler_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ImageViewHandleInfoNVX& operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageViewHandleInfoNVX ) ); return *this; } @@ -37759,6 +44718,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkImageViewHandleInfoNVX const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -37769,6 +44729,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageViewHandleInfoNVX const& ) const = default; +#else bool operator==( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -37782,6 +44746,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX; @@ -37789,169 +44756,1518 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageView imageView = {}; VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; VULKAN_HPP_NAMESPACE::Sampler sampler = {}; + }; static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct ImageViewUsageCreateInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {} ) VULKAN_HPP_NOEXCEPT - : usage( usage_ ) - {} - - VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo ) - offsetof( ImageViewUsageCreateInfo, pNext ) ); - return *this; - } - - ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageViewUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT - { - usage = usage_; - return *this; - } - - operator VkImageViewUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( usage == rhs.usage ); - } - - bool operator!=( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + using Type = ImageViewHandleInfoNVX; }; - static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #ifdef VK_USE_PLATFORM_ANDROID_KHR - - struct ImportAndroidHardwareBufferInfoANDROID + struct MemoryGetAndroidHardwareBufferInfoANDROID { - VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) {} - VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID & operator=( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID ) - offsetof( ImportAndroidHardwareBufferInfoANDROID, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ImportAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) ); + return *this; + } + + MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer* buffer_ ) VULKAN_HPP_NOEXCEPT + MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { - buffer = buffer_; + memory = memory_; return *this; } - operator VkImportAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT + + operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const& ) const = default; +#else + bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ); + && ( memory == 
rhs.memory ); } - bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; const void* pNext = {}; - struct AHardwareBuffer* buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + + }; + static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = MemoryGetAndroidHardwareBufferInfoANDROID; }; - static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + struct MemoryGetFdInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT + : memory( memory_ ), handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryGetFdInfoKHR ) ); + return *this; + } + + MemoryGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + + operator VkMemoryGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryGetFdInfoKHR const& ) const = default; +#else + bool operator==( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + + }; + static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = MemoryGetFdInfoKHR; + }; + + struct MemoryFdPropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryFdPropertiesKHR ) ); + return *this; + } + + + operator VkMemoryFdPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryFdPropertiesKHR const& ) const = default; +#else + bool operator==( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR; + void* pNext = {}; + uint32_t memoryTypeBits = {}; + + }; + static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MemoryFdPropertiesKHR; + }; + + struct MemoryHostPointerPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryHostPointerPropertiesEXT ) ); + return *this; + } + + + operator VkMemoryHostPointerPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryHostPointerPropertiesEXT const& ) const = default; +#else + bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; + void* pNext = {}; + uint32_t memoryTypeBits = {}; + + }; + static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MemoryHostPointerPropertiesEXT; + }; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct MemoryGetWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT + : memory( memory_ ), handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) ); + return *this; + } + + MemoryGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + + operator VkMemoryGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryGetWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + + }; + static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MemoryGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct MemoryWin32HandlePropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryWin32HandlePropertiesKHR ) ); + return *this; + } + + + operator VkMemoryWin32HandlePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryWin32HandlePropertiesKHR const& ) const = default; +#else + bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; + void* pNext = {}; + uint32_t memoryTypeBits = {}; + + }; + static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MemoryWin32HandlePropertiesKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct PastPresentationTimingGOOGLE + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}, uint64_t actualPresentTime_ = {}, uint64_t earliestPresentTime_ = {}, uint64_t presentMargin_ = {}) VULKAN_HPP_NOEXCEPT + : presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ ), actualPresentTime( actualPresentTime_ ), earliestPresentTime( earliestPresentTime_ ), presentMargin( presentMargin_ ) + {} + + VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PastPresentationTimingGOOGLE ) ); + return *this; + } + + + operator VkPastPresentationTimingGOOGLE const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PastPresentationTimingGOOGLE const& ) const = default; +#else + bool operator==( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( presentID == rhs.presentID ) + && ( desiredPresentTime == rhs.desiredPresentTime ) + && ( actualPresentTime == rhs.actualPresentTime ) + && ( earliestPresentTime == rhs.earliestPresentTime ) + && ( presentMargin == rhs.presentMargin ); + } + + bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + uint32_t presentID = {}; + uint64_t desiredPresentTime = {}; + uint64_t actualPresentTime = {}; + uint64_t earliestPresentTime = {}; + uint64_t presentMargin = {}; + + }; + static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + union PerformanceValueDataINTEL + { + PerformanceValueDataINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const& rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) ); + } + + PerformanceValueDataINTEL( uint32_t value32_ = {} ) + : value32( value32_ ) + {} + + PerformanceValueDataINTEL( uint64_t value64_ ) + : value64( value64_ ) + {} + + PerformanceValueDataINTEL( float valueFloat_ ) + : valueFloat( valueFloat_ ) + {} + + PerformanceValueDataINTEL( const char* valueString_ ) + : valueString( valueString_ ) + {} + + PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT + { + value32 = value32_; + return *this; + } + + PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT + { + value64 = value64_; + return *this; + } + + PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT + { + valueFloat = valueFloat_; + return *this; + } + + PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT + { + valueBool = valueBool_; + return *this; + } + + PerformanceValueDataINTEL & setValueString( const char* valueString_ ) VULKAN_HPP_NOEXCEPT + { + valueString = valueString_; + return *this; + } + + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) ); + return *this; + } + + operator VkPerformanceValueDataINTEL const&() const + { + return *reinterpret_cast(this); + } + + operator VkPerformanceValueDataINTEL &() + { + return *reinterpret_cast(this); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + uint32_t value32; + uint64_t value64; + float valueFloat; + VULKAN_HPP_NAMESPACE::Bool32 valueBool; + const char* valueString; +#else + uint32_t value32; + uint64_t value64; + float valueFloat; + VkBool32 valueBool; + const char* valueString; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct PerformanceValueINTEL + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + PerformanceValueINTEL(VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {}) VULKAN_HPP_NOEXCEPT + : type( type_ ), data( data_ ) + {} + + PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PerformanceValueINTEL ) ); + return *this; + } + + PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + + operator VkPerformanceValueINTEL const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT + 
{ + return *reinterpret_cast( this ); + } + + + + + public: + VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32; + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {}; + + }; + static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PipelineExecutableInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, uint32_t executableIndex_ = {}) VULKAN_HPP_NOEXCEPT + : pipeline( pipeline_ ), executableIndex( executableIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineExecutableInfoKHR ) ); + return *this; + } + + PipelineExecutableInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT + { + executableIndex = executableIndex_; + return *this; + } + + + operator VkPipelineExecutableInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineExecutableInfoKHR const& ) const = default; +#else + bool operator==( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pipeline == rhs.pipeline ) + && ( executableIndex == rhs.executableIndex ); + } + + bool operator!=( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + uint32_t executableIndex = {}; + + }; + static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutableInfoKHR>
+  {
+    using Type = PipelineExecutableInfoKHR;
+  };
+
+  struct PipelineExecutableInternalRepresentationKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, size_t dataSize_ = {}, void* pData_ = {}) VULKAN_HPP_NOEXCEPT
+      : name( name_ ), description( description_ ), isText( isText_ ), dataSize( dataSize_ ), pData( pData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    PipelineExecutableInternalRepresentationKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_, VULKAN_HPP_NAMESPACE::Bool32 isText_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ )
+      : name( name_ ), description( description_ ), isText( isText_ ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
+    {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>( &rhs );
+      return *this;
+    }
+
+    PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineExecutableInternalRepresentationKHR ) );
+      return *this;
+    }
+
+
+    operator VkPipelineExecutableInternalRepresentationKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR*>( this );
+    }
+
+    operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineExecutableInternalRepresentationKHR const& ) const = default;
+#else
+    bool operator==( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( name == rhs.name )
+          && ( description == rhs.description )
+          && ( isText == rhs.isText )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+    }
+
+    bool operator!=( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::Bool32 isText = {};
+    size_t dataSize = {};
+    void* pData = {};
+
+  };
+  static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutableInternalRepresentationKHR>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PipelineExecutableInternalRepresentationKHR; + }; + + struct PipelineInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}) VULKAN_HPP_NOEXCEPT + : pipeline( pipeline_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineInfoKHR ) ); + return *this; + } + + PipelineInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + + operator VkPipelineInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineInfoKHR const& ) const = default; +#else + bool operator==( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pipeline == rhs.pipeline ); + } + + bool operator!=( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + + }; + static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineInfoKHR; + }; + + struct PipelineExecutablePropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR(VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, std::array const& name_ = {}, std::array const& description_ = {}, uint32_t subgroupSize_ = {}) VULKAN_HPP_NOEXCEPT + : stages( stages_ ), name( name_ ), description( description_ ), subgroupSize( subgroupSize_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineExecutablePropertiesKHR ) ); + return *this; + } + + + operator VkPipelineExecutablePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineExecutablePropertiesKHR const& ) const = default; +#else + bool operator==( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( stages == rhs.stages ) + && ( name == rhs.name ) + && ( description == rhs.description ) + && ( subgroupSize == rhs.subgroupSize ); + } + + bool operator!=( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + uint32_t subgroupSize = {}; + + }; + static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PipelineExecutablePropertiesKHR; + }; + + union PipelineExecutableStatisticValueKHR + { + PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const& rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) ); + } + + PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) + : b32( b32_ ) + {} + + PipelineExecutableStatisticValueKHR( int64_t i64_ ) + : i64( i64_ ) + {} + + PipelineExecutableStatisticValueKHR( uint64_t u64_ ) + : u64( u64_ ) + {} + + PipelineExecutableStatisticValueKHR( double f64_ ) + : f64( f64_ ) + {} + + PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT + { + b32 = b32_; + return *this; + } + + PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT + { + i64 = i64_; + return *this; + } + + PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT + { + u64 = u64_; + return *this; + } + + PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT + { + f64 = f64_; + return *this; + } + + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) ); + return *this; + } + + operator VkPipelineExecutableStatisticValueKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineExecutableStatisticValueKHR &() + { + return *reinterpret_cast(this); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::Bool32 b32; + int64_t i64; + uint64_t u64; + double f64; +#else + VkBool32 b32; + int64_t i64; + uint64_t u64; + double f64; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct PipelineExecutableStatisticKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + PipelineExecutableStatisticKHR(std::array const& name_ = {}, std::array const& description_ = {}, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}) VULKAN_HPP_NOEXCEPT + : name( name_ ), description( description_ ), format( format_ ), value( value_ ) + {} + + PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineExecutableStatisticKHR ) ); + return *this; + } + + + operator VkPipelineExecutableStatisticKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT + { + 
return *reinterpret_cast<VkPipelineExecutableStatisticKHR*>( this );
+    }
+
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
+
+  };
+  static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
+  {
+    using Type = PipelineExecutableStatisticKHR;
+  };
+
+  struct RefreshCycleDurationGOOGLE
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE(uint64_t refreshDuration_ = {}) VULKAN_HPP_NOEXCEPT
+      : refreshDuration( refreshDuration_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>( &rhs );
+      return *this;
+    }
+
+    RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RefreshCycleDurationGOOGLE ) );
+      return *this;
+    }
+
+
+    operator VkRefreshCycleDurationGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>( this );
+    }
+
+    operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RefreshCycleDurationGOOGLE const& ) const = default;
+#else
+    bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( refreshDuration == rhs.refreshDuration );
+    }
+
+    bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint64_t refreshDuration = {};
+
+  };
+  static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!"
); + + struct SemaphoreGetFdInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ), handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SemaphoreGetFdInfoKHR ) ); + return *this; + } + + SemaphoreGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + + operator VkSemaphoreGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SemaphoreGetFdInfoKHR const& ) const = default; +#else + bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + + }; + static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SemaphoreGetFdInfoKHR; + }; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct SemaphoreGetWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ), handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) ); + return *this; + } + + SemaphoreGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + + operator VkSemaphoreGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SemaphoreGetWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + + }; + static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SemaphoreGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct ImportFenceFdInfoKHR { - VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - int fd_ = {} ) VULKAN_HPP_NOEXCEPT - : fence( fence_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , fd( fd_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT + : fence( fence_ ), flags( flags_ ), handleType( handleType_ ), fd( fd_ ) {} - VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR ) - offsetof( ImportFenceFdInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImportFenceFdInfoKHR ) ); return *this; } @@ -37985,6 +46301,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkImportFenceFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -37995,6 +46312,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportFenceFdInfoKHR const& ) const = default; +#else bool operator==( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -38009,6 +46330,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR; @@ -38017,40 +46341,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; int fd = {}; + }; static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> + struct CppType + { + using Type = ImportFenceFdInfoKHR; + }; +#ifdef VK_USE_PLATFORM_WIN32_KHR struct ImportFenceWin32HandleInfoKHR { - VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT - : fence( fence_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT + : fence( fence_ ), flags( flags_ ), handleType( handleType_ ), handle( handle_ ), name( name_ ) {} - VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR ) - offsetof( ImportFenceWin32HandleInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) ); return *this; } @@ -38090,6 +46419,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkImportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -38100,6 +46430,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportFenceWin32HandleInfoKHR const& ) const = default; +#else bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -38115,6 +46449,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; @@ -38124,353 +46461,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; HANDLE handle = {}; LPCWSTR name = {}; + }; static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different 
size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ImportMemoryFdInfoKHR + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - int fd_ = {} ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) - , fd( fd_ ) - {} - - VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR ) - offsetof( ImportMemoryFdInfoKHR, pNext ) ); - return *this; - } - - ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportMemoryFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT - { - fd = fd_; - return *this; - } - - operator VkImportMemoryFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( fd == rhs.fd ); - } - - bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - int fd = {}; + using Type = ImportFenceWin32HandleInfoKHR; }; - static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImportMemoryHostPointerInfoEXT - { - VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - void* pHostPointer_ = {} ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) - , pHostPointer( pHostPointer_ ) - {} - - VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) - offsetof( ImportMemoryHostPointerInfoEXT, pNext ) ); - return *this; - } - - ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportMemoryHostPointerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - ImportMemoryHostPointerInfoEXT & setPHostPointer( void* pHostPointer_ ) VULKAN_HPP_NOEXCEPT - { - pHostPointer = pHostPointer_; - return *this; - } - - operator VkImportMemoryHostPointerInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( pHostPointer == rhs.pHostPointer ); - } - - bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - void* pHostPointer = {}; - }; - static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - struct ImportMemoryWin32HandleInfoKHR - { - VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) - {} - - VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR ) - offsetof( ImportMemoryWin32HandleInfoKHR, pNext ) ); - return *this; - } - - ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT - { - handle = handle_; - return *this; - } - - ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT - { - name = name_; - return *this; - } - - operator VkImportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ) - && ( name == rhs.name ); - } - - bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - HANDLE handle = {}; - LPCWSTR name = {}; - }; - static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - struct ImportMemoryWin32HandleInfoNV - { - VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, - HANDLE handle_ = {} ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) - , handle( handle_ ) - {} - - VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV ) - offsetof( ImportMemoryWin32HandleInfoNV, pNext ) ); - return *this; - } - - ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT - { - handle = handle_; - return *this; - } - - operator VkImportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ); - } - - bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {}; - HANDLE handle = {}; - }; - static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct ImportSemaphoreFdInfoKHR { - VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - int fd_ = {} ) VULKAN_HPP_NOEXCEPT - : semaphore( semaphore_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , fd( fd_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ), flags( flags_ ), handleType( handleType_ ), fd( fd_ ) {} - VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR ) - offsetof( ImportSemaphoreFdInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImportSemaphoreFdInfoKHR ) ); return *this; } @@ -38504,6 +46533,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkImportSemaphoreFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -38514,6 +46544,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportSemaphoreFdInfoKHR const& ) const = default; +#else bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -38528,6 +46562,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; @@ -38536,40 +46573,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; int fd = {}; + }; static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> + struct CppType + { + using Type = ImportSemaphoreFdInfoKHR; + }; +#ifdef VK_USE_PLATFORM_WIN32_KHR struct ImportSemaphoreWin32HandleInfoKHR { - VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT - : semaphore( semaphore_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ), flags( flags_ ), handleType( handleType_ ), handle( handle_ ), name( name_ ) {} - VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR ) - offsetof( ImportSemaphoreWin32HandleInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) ); return *this; } @@ -38609,6 +46651,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkImportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -38619,6 +46662,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const& ) const = default; +#else bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -38634,6 +46681,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; @@ -38643,205 +46693,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; HANDLE handle = {}; 
LPCWSTR name = {}; + }; static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImportSemaphoreWin32HandleInfoKHR; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct IndirectCommandsLayoutTokenNVX - { - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline, - uint32_t bindingUnit_ = {}, - uint32_t dynamicCount_ = {}, - uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT - : tokenType( tokenType_ ) - , bindingUnit( bindingUnit_ ) - , dynamicCount( dynamicCount_ ) - , divisor( divisor_ ) - {} - - IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - IndirectCommandsLayoutTokenNVX & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT - { - tokenType = tokenType_; - return *this; - } - - IndirectCommandsLayoutTokenNVX & setBindingUnit( uint32_t bindingUnit_ ) VULKAN_HPP_NOEXCEPT - { - bindingUnit = bindingUnit_; - return *this; - } - - IndirectCommandsLayoutTokenNVX & setDynamicCount( uint32_t dynamicCount_ ) VULKAN_HPP_NOEXCEPT - { - dynamicCount = dynamicCount_; - return *this; - } - - IndirectCommandsLayoutTokenNVX & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT - { - divisor = divisor_; - return *this; - } - - operator VkIndirectCommandsLayoutTokenNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectCommandsLayoutTokenNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( tokenType == rhs.tokenType ) - && ( bindingUnit == rhs.bindingUnit ) - && ( dynamicCount == rhs.dynamicCount ) - && ( divisor == rhs.divisor ); - } - - bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline; - uint32_t bindingUnit = {}; - uint32_t dynamicCount = {}; - uint32_t divisor = {}; - }; - static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct IndirectCommandsLayoutCreateInfoNVX - { - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNVX( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags_ = {}, - uint32_t tokenCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens_ = {} ) VULKAN_HPP_NOEXCEPT - : pipelineBindPoint( pipelineBindPoint_ ) - , flags( flags_ ) - , tokenCount( tokenCount_ ) - , pTokens( pTokens_ ) - {} - - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX & operator=( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX ) - offsetof( IndirectCommandsLayoutCreateInfoNVX, pNext ) ); - return *this; - } - - IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - IndirectCommandsLayoutCreateInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - IndirectCommandsLayoutCreateInfoNVX & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT - { - pipelineBindPoint = pipelineBindPoint_; - return *this; - } - - IndirectCommandsLayoutCreateInfoNVX & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - IndirectCommandsLayoutCreateInfoNVX & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT - { - tokenCount = tokenCount_; - return *this; - } - - IndirectCommandsLayoutCreateInfoNVX & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens_ ) VULKAN_HPP_NOEXCEPT - { - pTokens = pTokens_; - return *this; - } - - operator VkIndirectCommandsLayoutCreateInfoNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectCommandsLayoutCreateInfoNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( flags == rhs.flags ) - && ( tokenCount == rhs.tokenCount ) - && ( pTokens == rhs.pTokens ); - } - - bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNVX; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags = {}; - uint32_t tokenCount = {}; - const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens = {}; - }; - static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct InitializePerformanceApiInfoINTEL { - VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT - : pUserData( pUserData_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL(void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT + : pUserData( pUserData_ ) {} - VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL ) - offsetof( InitializePerformanceApiInfoINTEL, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - InitializePerformanceApiInfoINTEL& operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( InitializePerformanceApiInfoINTEL ) ); return *this; } @@ -38857,6 +46747,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkInitializePerformanceApiInfoINTEL const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -38867,6 +46758,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( InitializePerformanceApiInfoINTEL const& ) const = default; +#else bool operator==( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -38878,33 +46773,12873 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL; const void* pNext = {}; void* pUserData = {}; + }; static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = InitializePerformanceApiInfoINTEL; + }; + + struct DisplayEventInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT(VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut) VULKAN_HPP_NOEXCEPT + : displayEvent( displayEvent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayEventInfoEXT ) ); + return *this; + } + + DisplayEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT + { + displayEvent = displayEvent_; + return *this; + } + + + operator VkDisplayEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayEventInfoEXT const& ) const = default; +#else + bool operator==( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayEvent == rhs.displayEvent ); + } + + bool operator!=( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut; + + }; + static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
+  {
+    using Type = DisplayEventInfoEXT;
+  };
+
+  struct XYColorEXT
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR XYColorEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>( &rhs );
+      return *this;
+    }
+
+    XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( XYColorEXT ) );
+      return *this;
+    }
+
+    XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+
+    operator VkXYColorEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXYColorEXT*>( this );
+    }
+
+    operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXYColorEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( XYColorEXT const& ) const = default;
+#else
+    bool operator==( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+    }
+
+    bool operator!=( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    float x = {};
+    float y = {};
+
+  };
+  static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<XYColorEXT>::value, "struct wrapper is not a standard layout!"
); + + struct HdrMetadataEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HdrMetadataEXT(VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, float maxLuminance_ = {}, float minLuminance_ = {}, float maxContentLightLevel_ = {}, float maxFrameAverageLightLevel_ = {}) VULKAN_HPP_NOEXCEPT + : displayPrimaryRed( displayPrimaryRed_ ), displayPrimaryGreen( displayPrimaryGreen_ ), displayPrimaryBlue( displayPrimaryBlue_ ), whitePoint( whitePoint_ ), maxLuminance( maxLuminance_ ), minLuminance( minLuminance_ ), maxContentLightLevel( maxContentLightLevel_ ), maxFrameAverageLightLevel( maxFrameAverageLightLevel_ ) + {} + + VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( HdrMetadataEXT ) ); + return *this; + } + + HdrMetadataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT + { + displayPrimaryRed = displayPrimaryRed_; + return *this; + } + + HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT + { + displayPrimaryGreen = displayPrimaryGreen_; + return *this; + } + + HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT + { + displayPrimaryBlue = displayPrimaryBlue_; + return *this; + } + + HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT + { + whitePoint = whitePoint_; + return *this; + } + + HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT + { + maxLuminance = maxLuminance_; + return *this; + } + + HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT + { + minLuminance = minLuminance_; + return *this; + } + + HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxContentLightLevel = maxContentLightLevel_; + return *this; + } + + HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameAverageLightLevel = maxFrameAverageLightLevel_; + return *this; + } + + + operator VkHdrMetadataEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( HdrMetadataEXT const& ) const = default; +#else + bool operator==( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayPrimaryRed == rhs.displayPrimaryRed ) + && ( 
displayPrimaryGreen == rhs.displayPrimaryGreen ) + && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) + && ( whitePoint == rhs.whitePoint ) + && ( maxLuminance == rhs.maxLuminance ) + && ( minLuminance == rhs.minLuminance ) + && ( maxContentLightLevel == rhs.maxContentLightLevel ) + && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); + } + + bool operator!=( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {}; + float maxLuminance = {}; + float minLuminance = {}; + float maxContentLightLevel = {}; + float maxFrameAverageLightLevel = {}; + + }; + static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = HdrMetadataEXT; + }; + + struct SemaphoreSignalInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ), value( value_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SemaphoreSignalInfo ) ); + return *this; + } + + SemaphoreSignalInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT + { + value = value_; + return *this; + } + + + operator VkSemaphoreSignalInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SemaphoreSignalInfo const& ) const = default; +#else + bool operator==( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( value == rhs.value ); + } + + bool operator!=( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + uint64_t 
value = {}; + + }; + static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SemaphoreSignalInfo; + }; + using SemaphoreSignalInfoKHR = SemaphoreSignalInfo; + + struct SemaphoreWaitInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo(VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, uint32_t semaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ = {}, const uint64_t* pValues_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), semaphoreCount( semaphoreCount_ ), pSemaphores( pSemaphores_ ), pValues( pValues_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ = {} ) + : flags( flags_ ), semaphoreCount( static_cast( semaphores_.size() ) ), pSemaphores( semaphores_.data() ), pValues( values_.data() ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() ); +#else + if ( semaphores_.size() != values_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SemaphoreWaitInfo ) ); + return *this; + } + + SemaphoreWaitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreCount = semaphoreCount_; + return *this; + } + + SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSemaphores = pSemaphores_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SemaphoreWaitInfo & setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreCount = static_cast( semaphores_.size() ); + pSemaphores = semaphores_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + SemaphoreWaitInfo & setPValues( const uint64_t* pValues_ ) VULKAN_HPP_NOEXCEPT + { + pValues = pValues_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + 
semaphoreCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkSemaphoreWaitInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SemaphoreWaitInfo const& ) const = default; +#else + bool operator==( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( semaphoreCount == rhs.semaphoreCount ) + && ( pSemaphores == rhs.pSemaphores ) + && ( pValues == rhs.pValues ); + } + + bool operator!=( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {}; + uint32_t semaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores = {}; + const uint64_t* pValues = {}; + + }; + static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SemaphoreWaitInfo; + }; + using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + class Device; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueAccelerationStructureKHR = UniqueHandle; + using UniqueAccelerationStructureNV = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueBuffer = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueBufferView = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = PoolFree; }; + using UniqueCommandBuffer = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueCommandPool = UniqueHandle; +#ifdef VK_ENABLE_BETA_EXTENSIONS + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueDeferredOperationKHR = UniqueHandle; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueDescriptorPool = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = PoolFree; }; + using UniqueDescriptorSet = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueDescriptorSetLayout = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueDescriptorUpdateTemplate = UniqueHandle; + using UniqueDescriptorUpdateTemplateKHR = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectFree; }; + using UniqueDeviceMemory = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueEvent = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueFence = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueFramebuffer = UniqueHandle; + template class 
UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueImage = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueImageView = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueIndirectCommandsLayoutNV = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniquePipeline = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniquePipelineCache = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniquePipelineLayout = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniquePrivateDataSlotEXT = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueQueryPool = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueRenderPass = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueSampler = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueSamplerYcbcrConversion = UniqueHandle; + using UniqueSamplerYcbcrConversionKHR = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueSemaphore = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueShaderModule = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueSwapchainKHR = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueValidationCacheEXT = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class Device + { + public: + using CType = VkDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; + + public: + VULKAN_HPP_CONSTEXPR Device() VULKAN_HPP_NOEXCEPT + : m_device(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_device(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT + : m_device( device ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Device & operator=(VkDevice device) VULKAN_HPP_NOEXCEPT + { + m_device = device; + return *this; + } +#endif + + Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_device = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Device const& ) const = default; +#else + bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device == rhs.m_device; + } + + bool operator!=(Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device != rhs.m_device; + } + + bool operator<(Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device < rhs.m_device; + } +#endif + +#ifdef VK_USE_PLATFORM_WIN32_KHR +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD ResultValue acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD ResultValue acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS 
typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template>> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template>> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + 
VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryKHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindAccelerationStructureMemoryKHR( ArrayProxy const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindAccelerationStructureMemoryNV( ArrayProxy const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindBufferMemory2( ArrayProxy const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindBufferMemory2KHR( ArrayProxy const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
template + VULKAN_HPP_NODISCARD Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindImageMemory2( ArrayProxy const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindImageMemory2KHR( ArrayProxy const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD Result buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result buildAccelerationStructureKHR( ArrayProxy const &infos, ArrayProxy const &pOffsetInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD Result 
copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructure, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createBuffer( const BufferCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createBufferUnique( 
const BufferCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createBufferView( const BufferViewCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + VULKAN_HPP_NODISCARD ResultValue 
createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template>> + VULKAN_HPP_NODISCARD ResultValue,Allocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue,Allocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + VULKAN_HPP_NODISCARD ResultValue> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createDeferredOperationKHR( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createDeferredOperationKHRUnique( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* 
pSetLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createEvent( const EventCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createEventUnique( const EventCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createFence( const FenceCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + VULKAN_HPP_NODISCARD ResultValue createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template>> + VULKAN_HPP_NODISCARD ResultValue,Allocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue,Allocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createImage( const ImageCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createImageUnique( const ImageCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createImageView( const ImageViewCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createPrivateDataSlotEXT( const 
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT* pPrivateDataSlot, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + VULKAN_HPP_NODISCARD ResultValue createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + 
template>> + VULKAN_HPP_NODISCARD ResultValue,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + VULKAN_HPP_NODISCARD ResultValue createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template>> + VULKAN_HPP_NODISCARD ResultValue,Allocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue,Allocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createSampler( const SamplerCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createSemaphore( const SemaphoreCreateInfo & createInfo, 
Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template>> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy const &createInfos, Optional 
allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void 
destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( 
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch 
const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + 
void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type flushMappedMemoryRanges( ArrayProxy const &memoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy const &commandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy const &commandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy const &descriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result free( 
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy const &descriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + + template + VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + void getAccelerationStructureMemoryRequirementsKHR( const 
VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template + VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + DeviceAddress 
getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( uint32_t timestampCount, const 
VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getCalibratedTimestampsEXT( ArrayProxy const ×tampInfos, ArrayProxy const ×tamps, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + Result getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + 
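Several of the getters declared above come in both a plain form and a StructureChain form whose chain template parameters are likewise not visible in this rendering. A sketch of the two getDescriptorSetLayoutSupport shapes, assuming an illustrative device handle and a filled layoutCreateInfo:

    #include <vulkan/vulkan.hpp>

    void layoutSupportExample( vk::Device device, vk::DescriptorSetLayoutCreateInfo const & layoutCreateInfo )
    {
      // Plain enhanced-mode overload: the output structure is returned by value.
      vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( layoutCreateInfo );

      // StructureChain overload: chains an extension structure onto the query.
      auto chain = device.getDescriptorSetLayoutSupport<
          vk::DescriptorSetLayoutSupport,
          vk::DescriptorSetVariableDescriptorCountLayoutSupport>( layoutCreateInfo );
      auto variable = chain.get<vk::DescriptorSetVariableDescriptorCountLayoutSupport>();

      (void)support;
      (void)variable;
    }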
template + typename ResultValueType::type getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getGroupPresentCapabilitiesKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_NODISCARD Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain 
getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + std::vector getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, 
int>::type = 0> + std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template + VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + VULKAN_HPP_NODISCARD Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits 
handleType, int fd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_NODISCARD Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_NODISCARD Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename 
ResultValueType>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + template + VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy const &data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + + template + VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + 
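The buffer-filling queries above additionally get templated enhanced-mode variants that return a std::vector<T> (or a single T) instead of writing through a raw void*; their <typename T, ...> parameter lists are stripped in this rendering. A sketch for getQueryPoolResults, assuming an illustrative queryPool holding two 64-bit timestamp queries and exceptions enabled:

    #include <vulkan/vulkan.hpp>

    void queryResultsExample( vk::Device device, vk::QueryPool queryPool )
    {
      // Returns ResultValue rather than throwing on eNotReady, since "not ready"
      // is a legitimate outcome of vkGetQueryPoolResults.
      vk::ResultValue<std::vector<uint64_t>> rv =
          device.getQueryPoolResults<uint64_t>( queryPool,
                                                0,                       // firstQuery
                                                2,                       // queryCount
                                                2 * sizeof( uint64_t ),  // dataSize
                                                sizeof( uint64_t ),      // stride
                                                vk::QueryResultFlagBits::e64 );
      if ( rv.result == vk::Result::eSuccess )
      {
        uint64_t gpuDelta = rv.value[1] - rv.value[0];
        (void)gpuDelta;
      }
    }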
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + template + VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( const 
VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type invalidateMappedMemoryRanges( ArrayProxy const &memoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy const &srcCaches, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy const &srcCaches, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void releaseProfilingLockKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void releaseProfilingLockKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags 
flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type resetFences( ArrayProxy const &fences, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setDebugUtilsObjectTagEXT( const 
DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setHdrMetadataEXT( ArrayProxy const &swapchains, ArrayProxy const &metadata, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void uninitializePerformanceApiINTEL(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void uninitializePerformanceApiINTEL(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateDescriptorSets( ArrayProxy const &descriptorWrites, 
ArrayProxy const &descriptorCopies, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitForFences( ArrayProxy const &fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type writeAccelerationStructuresPropertiesKHR( ArrayProxy const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy const &data, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT + { + return m_device; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_device != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_device == VK_NULL_HANDLE; + } + + private: + VkDevice m_device; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Device; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Device; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Device; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct DisplayModeParametersKHR + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR(VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {}) VULKAN_HPP_NOEXCEPT + : visibleRegion( visibleRegion_ ), refreshRate( refreshRate_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayModeParametersKHR ) ); + return *this; + } + + DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT + { + visibleRegion = visibleRegion_; + return *this; + } + + DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT + { + refreshRate = refreshRate_; + return *this; + } + + + operator VkDisplayModeParametersKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayModeParametersKHR const& ) const = default; +#else + bool operator==( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( visibleRegion == rhs.visibleRegion ) + && ( refreshRate == rhs.refreshRate ); + } + + bool operator!=( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {}; + uint32_t refreshRate = {}; + + }; + static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct DisplayModeCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), parameters( parameters_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayModeCreateInfoKHR ) ); + return *this; + } + + DisplayModeCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT + { + parameters = parameters_; + return *this; + } + + + operator VkDisplayModeCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayModeCreateInfoKHR const& ) const = default; +#else + bool operator==( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( parameters == rhs.parameters ); + } + + bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + + }; + static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayModeCreateInfoKHR; + }; + + class DisplayModeKHR + { + public: + using CType = VkDisplayModeKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR; + + public: + VULKAN_HPP_CONSTEXPR DisplayModeKHR() VULKAN_HPP_NOEXCEPT + : m_displayModeKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_displayModeKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT + : m_displayModeKHR( displayModeKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) VULKAN_HPP_NOEXCEPT + { + m_displayModeKHR = displayModeKHR; + return *this; + } +#endif + + DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_displayModeKHR = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayModeKHR const& ) const = default; +#else + bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR == rhs.m_displayModeKHR; + } + + bool operator!=(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR != rhs.m_displayModeKHR; + } + + bool operator<(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR < rhs.m_displayModeKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayModeKHR m_displayModeKHR; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct ExtensionProperties + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ExtensionProperties(std::array const& extensionName_ = {}, uint32_t specVersion_ = {}) VULKAN_HPP_NOEXCEPT + : extensionName( extensionName_ ), specVersion( specVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExtensionProperties ) ); + return *this; + } + + + operator VkExtensionProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExtensionProperties const& ) const = default; +#else + bool operator==( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( extensionName == rhs.extensionName ) + && ( specVersion == rhs.specVersion ); + } + + bool operator!=( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D extensionName = {}; + uint32_t specVersion = {}; + + }; + static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct LayerProperties + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 LayerProperties(std::array const& layerName_ = {}, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::array const& description_ = {}) VULKAN_HPP_NOEXCEPT + : layerName( layerName_ ), specVersion( specVersion_ ), implementationVersion( implementationVersion_ ), description( description_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( LayerProperties ) ); + return *this; + } + + + operator VkLayerProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( LayerProperties const& ) const = default; +#else + bool operator==( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( layerName == rhs.layerName ) + && ( specVersion == rhs.specVersion ) + && ( implementationVersion == rhs.implementationVersion ) + && ( description == rhs.description ); + } + + bool operator!=( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D layerName = {}; + uint32_t specVersion = {}; + uint32_t implementationVersion = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + + }; + static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PerformanceCounterKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, std::array const& uuid_ = {}) VULKAN_HPP_NOEXCEPT + : unit( unit_ ), scope( scope_ ), storage( storage_ ), uuid( uuid_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PerformanceCounterKHR ) ); + return *this; + } + + + operator VkPerformanceCounterKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PerformanceCounterKHR const& ) const = default; +#else + bool operator==( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( unit == rhs.unit ) + && ( scope == rhs.scope ) + && ( storage == rhs.storage ) + && ( uuid == rhs.uuid ); + } + + bool operator!=( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric; + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer; + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D uuid = {}; + + }; + static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PerformanceCounterKHR; + }; + + struct PerformanceCounterDescriptionKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, std::array const& name_ = {}, std::array const& category_ = {}, std::array const& description_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), name( name_ ), category( category_ ), description( description_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PerformanceCounterDescriptionKHR ) ); + return *this; + } + + + operator VkPerformanceCounterDescriptionKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PerformanceCounterDescriptionKHR const& ) const = default; +#else + bool operator==( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( name == rhs.name ) + && ( category == rhs.category ) + && ( description == rhs.description ); + } + + bool operator!=( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D category = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + + }; + static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PerformanceCounterDescriptionKHR; + }; + + struct DisplayModePropertiesKHR + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT + : displayMode( displayMode_ ), parameters( parameters_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayModePropertiesKHR ) ); + return *this; + } + + + operator VkDisplayModePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayModePropertiesKHR const& ) const = default; +#else + bool operator==( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( displayMode == rhs.displayMode ) + && ( parameters == rhs.parameters ); + } + + bool operator!=( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + + }; + static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct DisplayModeProperties2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {}) VULKAN_HPP_NOEXCEPT + : displayModeProperties( displayModeProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayModeProperties2KHR ) ); + return *this; + } + + + operator VkDisplayModeProperties2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayModeProperties2KHR const& ) const = default; +#else + bool operator==( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayModeProperties == rhs.displayModeProperties ); + } + + bool operator!=( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {}; + + }; + static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayModeProperties2KHR; + }; + + struct DisplayPlaneInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}) VULKAN_HPP_NOEXCEPT + : mode( mode_ ), planeIndex( planeIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayPlaneInfo2KHR ) ); + return *this; + } + + DisplayPlaneInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeIndex = planeIndex_; + return *this; + } + + + operator VkDisplayPlaneInfo2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayPlaneInfo2KHR const& ) const = default; +#else + bool operator==( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( mode == rhs.mode ) + && ( planeIndex == rhs.planeIndex ); + } + + bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; + uint32_t planeIndex = {}; + + }; + static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayPlaneInfo2KHR; + }; + + struct DisplayPlaneCapabilitiesKHR + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {}) VULKAN_HPP_NOEXCEPT + : supportedAlpha( supportedAlpha_ ), minSrcPosition( minSrcPosition_ ), maxSrcPosition( maxSrcPosition_ ), minSrcExtent( minSrcExtent_ ), maxSrcExtent( maxSrcExtent_ ), minDstPosition( minDstPosition_ ), maxDstPosition( maxDstPosition_ ), minDstExtent( minDstExtent_ ), maxDstExtent( maxDstExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayPlaneCapabilitiesKHR ) ); + return *this; + } + + + operator VkDisplayPlaneCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayPlaneCapabilitiesKHR const& ) const = default; +#else + bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( supportedAlpha == rhs.supportedAlpha ) + && ( minSrcPosition == rhs.minSrcPosition ) + && ( maxSrcPosition == rhs.maxSrcPosition ) + && ( minSrcExtent == rhs.minSrcExtent ) + && ( maxSrcExtent == rhs.maxSrcExtent ) + && ( minDstPosition == rhs.minDstPosition ) + && ( maxDstPosition == rhs.maxDstPosition ) + && ( minDstExtent == rhs.minDstExtent ) + && ( maxDstExtent == rhs.maxDstExtent ); + } + + bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {}; + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {}; + + }; + static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct DisplayPlaneCapabilities2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR(VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {}) VULKAN_HPP_NOEXCEPT + : capabilities( capabilities_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayPlaneCapabilities2KHR ) ); + return *this; + } + + + operator VkDisplayPlaneCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayPlaneCapabilities2KHR const& ) const = default; +#else + bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( capabilities == rhs.capabilities ); + } + + bool operator!=( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {}; + + }; + static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayPlaneCapabilities2KHR; + }; + + struct DisplayPlanePropertiesKHR + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, uint32_t currentStackIndex_ = {}) VULKAN_HPP_NOEXCEPT + : currentDisplay( currentDisplay_ ), currentStackIndex( currentStackIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayPlanePropertiesKHR ) ); + return *this; + } + + + operator VkDisplayPlanePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayPlanePropertiesKHR const& ) const = default; +#else + bool operator==( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( currentDisplay == rhs.currentDisplay ) + && ( currentStackIndex == rhs.currentStackIndex ); + } + + bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {}; + uint32_t currentStackIndex = {}; + + }; + static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct DisplayPlaneProperties2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}) VULKAN_HPP_NOEXCEPT + : displayPlaneProperties( displayPlaneProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayPlaneProperties2KHR ) ); + return *this; + } + + + operator VkDisplayPlaneProperties2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayPlaneProperties2KHR const& ) const = default; +#else + bool operator==( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayPlaneProperties == rhs.displayPlaneProperties ); + } + + bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {}; + + }; + static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayPlaneProperties2KHR; + }; + + struct DisplayPropertiesKHR + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, const char* displayName_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {}) VULKAN_HPP_NOEXCEPT + : display( display_ ), displayName( displayName_ ), physicalDimensions( physicalDimensions_ ), physicalResolution( physicalResolution_ ), supportedTransforms( supportedTransforms_ ), planeReorderPossible( planeReorderPossible_ ), persistentContent( persistentContent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayPropertiesKHR ) ); + return *this; + } + + + operator VkDisplayPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayPropertiesKHR const& ) const = default; +#else + bool operator==( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( display == rhs.display ) + && ( displayName == rhs.displayName ) + && ( physicalDimensions == rhs.physicalDimensions ) + && ( physicalResolution == rhs.physicalResolution ) + && ( supportedTransforms == rhs.supportedTransforms ) + && ( planeReorderPossible == rhs.planeReorderPossible ) + && ( persistentContent == rhs.persistentContent ); + } + + bool operator!=( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::DisplayKHR display = {}; + const char* displayName = {}; + VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {}; + VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {}; + + }; + static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct DisplayProperties2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {}) VULKAN_HPP_NOEXCEPT + : displayProperties( displayProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayProperties2KHR ) ); + return *this; + } + + + operator VkDisplayProperties2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayProperties2KHR const& ) const = default; +#else + bool operator==( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayProperties == rhs.displayProperties ); + } + + bool operator!=( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; + + }; + static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayProperties2KHR; + }; + + struct PhysicalDeviceExternalBufferInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), usage( usage_ ), handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceExternalBufferInfo ) ); + return *this; + } + + PhysicalDeviceExternalBufferInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + + operator VkPhysicalDeviceExternalBufferInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceExternalBufferInfo const& ) const = default; +#else + bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( usage == rhs.usage ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + + }; + static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalBufferInfo; + }; + using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; + + struct ExternalMemoryProperties + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT + : externalMemoryFeatures( externalMemoryFeatures_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExternalMemoryProperties ) ); + return *this; + } + + + operator VkExternalMemoryProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExternalMemoryProperties const& ) const = default; +#else + bool operator==( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) + && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) + && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); + } + + bool operator!=( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; + + }; + static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; + + struct ExternalBufferProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalBufferProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}) VULKAN_HPP_NOEXCEPT + : externalMemoryProperties( externalMemoryProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExternalBufferProperties ) ); + return *this; + } + + + operator VkExternalBufferProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExternalBufferProperties const& ) const = default; +#else + bool operator==( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( externalMemoryProperties == rhs.externalMemoryProperties ); + } + + bool operator!=( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; + + }; + static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExternalBufferProperties; + }; + using ExternalBufferPropertiesKHR = ExternalBufferProperties; + + struct PhysicalDeviceExternalFenceInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceExternalFenceInfo ) ); + return *this; + } + + PhysicalDeviceExternalFenceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + + operator VkPhysicalDeviceExternalFenceInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceExternalFenceInfo const& ) const = default; +#else + bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + + }; + static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalFenceInfo; + }; + using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; + + struct ExternalFenceProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFenceProperties(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {}) VULKAN_HPP_NOEXCEPT + : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ), externalFenceFeatures( externalFenceFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExternalFenceProperties ) ); + return *this; + } + + + operator VkExternalFenceProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExternalFenceProperties const& ) const = default; +#else + bool operator==( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) + && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) + && ( externalFenceFeatures == rhs.externalFenceFeatures ); + } + + bool operator!=( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {}; + + }; + static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExternalFenceProperties; + }; + using ExternalFencePropertiesKHR = ExternalFenceProperties; + + struct ImageFormatProperties + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatProperties(VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, uint32_t maxMipLevels_ = {}, uint32_t maxArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {}) VULKAN_HPP_NOEXCEPT + : maxExtent( maxExtent_ ), maxMipLevels( maxMipLevels_ ), maxArrayLayers( maxArrayLayers_ ), sampleCounts( sampleCounts_ ), maxResourceSize( maxResourceSize_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageFormatProperties ) ); + return *this; + } + + + operator VkImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageFormatProperties const& ) const = default; +#else + bool operator==( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( maxExtent == rhs.maxExtent ) + && ( maxMipLevels == rhs.maxMipLevels ) + && ( maxArrayLayers == rhs.maxArrayLayers ) + && ( sampleCounts == rhs.sampleCounts ) + && ( maxResourceSize == rhs.maxResourceSize ); + } + + bool operator!=( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {}; + uint32_t maxMipLevels = {}; + uint32_t maxArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {}; + + }; + static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ExternalImageFormatPropertiesNV + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT + : imageFormatProperties( imageFormatProperties_ ), externalMemoryFeatures( externalMemoryFeatures_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExternalImageFormatPropertiesNV ) ); + return *this; + } + + + operator VkExternalImageFormatPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExternalImageFormatPropertiesNV const& ) const = default; +#else + bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( imageFormatProperties == rhs.imageFormatProperties ) + && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) + && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) + && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); + } + + bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {}; + + }; + static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceExternalSemaphoreInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfo ) ); + return *this; + } + + PhysicalDeviceExternalSemaphoreInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + + operator VkPhysicalDeviceExternalSemaphoreInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const& ) const = default; +#else + bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + + }; + static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalSemaphoreInfo; + }; + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; + + struct ExternalSemaphoreProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}) VULKAN_HPP_NOEXCEPT + : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ), externalSemaphoreFeatures( externalSemaphoreFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExternalSemaphoreProperties ) ); + return *this; + } + + + operator VkExternalSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExternalSemaphoreProperties const& ) const = default; +#else + bool operator==( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) + && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) + && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); + } + + bool operator!=( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; + + }; + static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExternalSemaphoreProperties; + }; + using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; + + struct PhysicalDeviceFeatures2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}) VULKAN_HPP_NOEXCEPT + : features( features_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceFeatures2 ) ); + return *this; + } + + PhysicalDeviceFeatures2 & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT + { + features = features_; + return *this; + } + + + operator VkPhysicalDeviceFeatures2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceFeatures2 const& ) const = default; +#else + bool operator==( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( features == rhs.features ); + } + + bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {}; + + }; + static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceFeatures2; + }; + using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; + + struct FormatProperties + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FormatProperties(VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {}) VULKAN_HPP_NOEXCEPT + : linearTilingFeatures( linearTilingFeatures_ ), optimalTilingFeatures( optimalTilingFeatures_ ), bufferFeatures( bufferFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( FormatProperties ) ); + return *this; + } + + + operator VkFormatProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( FormatProperties const& ) const = default; +#else + bool operator==( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( linearTilingFeatures == rhs.linearTilingFeatures ) + && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) + && ( bufferFeatures == rhs.bufferFeatures ); + } + + bool operator!=( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {}; + + }; + static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct FormatProperties2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FormatProperties2(VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}) VULKAN_HPP_NOEXCEPT + : formatProperties( formatProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( FormatProperties2 ) ); + return *this; + } + + + operator VkFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( FormatProperties2 const& ) const = default; +#else + bool operator==( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( formatProperties == rhs.formatProperties ); + } + + bool operator!=( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {}; + + }; + static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = FormatProperties2; + }; + using FormatProperties2KHR = FormatProperties2; + + struct PhysicalDeviceImageFormatInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT + : format( format_ ), type( type_ ), tiling( tiling_ ), usage( usage_ ), flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceImageFormatInfo2 ) ); + return *this; + } + + PhysicalDeviceImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } + + PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + + operator VkPhysicalDeviceImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceImageFormatInfo2 const& ) const = default; +#else + bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( format == rhs.format ) + && ( type == rhs.type ) + && ( tiling == rhs.tiling ) + && ( usage == rhs.usage ) + && ( flags == rhs.flags ); + } + + bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + 
VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + + }; + static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageFormatInfo2; + }; + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + + struct ImageFormatProperties2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatProperties2(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}) VULKAN_HPP_NOEXCEPT + : imageFormatProperties( imageFormatProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageFormatProperties2 ) ); + return *this; + } + + + operator VkImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageFormatProperties2 const& ) const = default; +#else + bool operator==( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( imageFormatProperties == rhs.imageFormatProperties ); + } + + bool operator!=( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + + }; + static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageFormatProperties2; + }; + using ImageFormatProperties2KHR = ImageFormatProperties2; + + struct MemoryType + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryType(VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {}) VULKAN_HPP_NOEXCEPT + : propertyFlags( propertyFlags_ ), heapIndex( heapIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryType ) ); + return *this; + } + + + operator VkMemoryType const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryType &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryType const& ) const = default; +#else + bool operator==( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( propertyFlags == rhs.propertyFlags ) + && ( heapIndex == rhs.heapIndex ); + } + + bool operator!=( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {}; + uint32_t heapIndex = {}; + + }; + static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct MemoryHeap + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryHeap(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {}) VULKAN_HPP_NOEXCEPT + : size( size_ ), flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryHeap ) ); + return *this; + } + + + operator VkMemoryHeap const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryHeap const& ) const = default; +#else + bool operator==( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( size == rhs.size ) + && ( flags == rhs.flags ); + } + + bool operator!=( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {}; + + }; + static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" 
);
+  static_assert( std::is_standard_layout<MemoryHeap>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMemoryProperties
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties(uint32_t memoryTypeCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> const& memoryTypes_ = {}, uint32_t memoryHeapCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> const& memoryHeaps_ = {}) VULKAN_HPP_NOEXCEPT
+    : memoryTypeCount( memoryTypeCount_ ), memoryTypes( memoryTypes_ ), memoryHeapCount( memoryHeapCount_ ), memoryHeaps( memoryHeaps_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMemoryProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMemoryProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( memoryTypeCount == rhs.memoryTypeCount )
+          && ( memoryTypes == rhs.memoryTypes )
+          && ( memoryHeapCount == rhs.memoryHeapCount )
+          && ( memoryHeaps == rhs.memoryHeaps );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t memoryTypeCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> memoryTypes = {};
+    uint32_t memoryHeapCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!"
);
+
+  struct PhysicalDeviceMemoryProperties2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : memoryProperties( memoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMemoryProperties2 ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMemoryProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMemoryProperties2 const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryProperties == rhs.memoryProperties );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryProperties2; + }; + using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; + + struct MultisamplePropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}) VULKAN_HPP_NOEXCEPT + : maxSampleLocationGridSize( maxSampleLocationGridSize_ ) + {} + + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MultisamplePropertiesEXT ) ); + return *this; + } + + + operator VkMultisamplePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MultisamplePropertiesEXT const& ) const = default; +#else + bool operator==( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); + } + + bool operator!=( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + + }; + static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MultisamplePropertiesEXT; + }; + + struct PhysicalDeviceLimits + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits(uint32_t maxImageDimension1D_ = {}, uint32_t maxImageDimension2D_ = {}, uint32_t maxImageDimension3D_ = {}, uint32_t maxImageDimensionCube_ = {}, uint32_t maxImageArrayLayers_ = {}, uint32_t maxTexelBufferElements_ = {}, uint32_t maxUniformBufferRange_ = {}, uint32_t maxStorageBufferRange_ = {}, uint32_t maxPushConstantsSize_ = {}, uint32_t maxMemoryAllocationCount_ = {}, uint32_t maxSamplerAllocationCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, uint32_t maxBoundDescriptorSets_ = {}, uint32_t maxPerStageDescriptorSamplers_ = {}, uint32_t maxPerStageDescriptorUniformBuffers_ = {}, uint32_t maxPerStageDescriptorStorageBuffers_ = {}, uint32_t maxPerStageDescriptorSampledImages_ = {}, uint32_t maxPerStageDescriptorStorageImages_ = {}, uint32_t maxPerStageDescriptorInputAttachments_ = {}, uint32_t maxPerStageResources_ = {}, uint32_t maxDescriptorSetSamplers_ = {}, uint32_t maxDescriptorSetUniformBuffers_ = {}, uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetStorageBuffers_ = {}, uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetSampledImages_ = {}, uint32_t maxDescriptorSetStorageImages_ = {}, uint32_t maxDescriptorSetInputAttachments_ = {}, uint32_t maxVertexInputAttributes_ = {}, uint32_t maxVertexInputBindings_ = {}, uint32_t maxVertexInputAttributeOffset_ = {}, uint32_t maxVertexInputBindingStride_ = {}, uint32_t maxVertexOutputComponents_ = {}, uint32_t maxTessellationGenerationLevel_ = {}, uint32_t maxTessellationPatchSize_ = {}, uint32_t maxTessellationControlPerVertexInputComponents_ = {}, uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, uint32_t maxTessellationControlTotalOutputComponents_ = {}, uint32_t maxTessellationEvaluationInputComponents_ = {}, uint32_t maxTessellationEvaluationOutputComponents_ = {}, uint32_t maxGeometryShaderInvocations_ = {}, uint32_t maxGeometryInputComponents_ = {}, uint32_t maxGeometryOutputComponents_ = {}, uint32_t maxGeometryOutputVertices_ = {}, uint32_t maxGeometryTotalOutputComponents_ = {}, uint32_t maxFragmentInputComponents_ = {}, uint32_t maxFragmentOutputAttachments_ = {}, uint32_t maxFragmentDualSrcAttachments_ = {}, uint32_t maxFragmentCombinedOutputResources_ = {}, uint32_t maxComputeSharedMemorySize_ = {}, std::array const& maxComputeWorkGroupCount_ = {}, uint32_t maxComputeWorkGroupInvocations_ = {}, std::array const& maxComputeWorkGroupSize_ = {}, uint32_t subPixelPrecisionBits_ = {}, uint32_t subTexelPrecisionBits_ = {}, uint32_t mipmapPrecisionBits_ = {}, uint32_t maxDrawIndexedIndexValue_ = {}, uint32_t maxDrawIndirectCount_ = {}, float maxSamplerLodBias_ = {}, float maxSamplerAnisotropy_ = {}, uint32_t maxViewports_ = {}, std::array const& maxViewportDimensions_ = {}, std::array const& viewportBoundsRange_ = {}, uint32_t viewportSubPixelBits_ = {}, size_t minMemoryMapAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, int32_t minTexelOffset_ = {}, uint32_t maxTexelOffset_ = {}, int32_t minTexelGatherOffset_ = {}, 
uint32_t maxTexelGatherOffset_ = {}, float minInterpolationOffset_ = {}, float maxInterpolationOffset_ = {}, uint32_t subPixelInterpolationOffsetBits_ = {}, uint32_t maxFramebufferWidth_ = {}, uint32_t maxFramebufferHeight_ = {}, uint32_t maxFramebufferLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, uint32_t maxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, uint32_t maxSampleMaskWords_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, float timestampPeriod_ = {}, uint32_t maxClipDistances_ = {}, uint32_t maxCullDistances_ = {}, uint32_t maxCombinedClipAndCullDistances_ = {}, uint32_t discreteQueuePriorities_ = {}, std::array const& pointSizeRange_ = {}, std::array const& lineWidthRange_ = {}, float pointSizeGranularity_ = {}, float lineWidthGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {}) VULKAN_HPP_NOEXCEPT + : maxImageDimension1D( maxImageDimension1D_ ), maxImageDimension2D( maxImageDimension2D_ ), maxImageDimension3D( maxImageDimension3D_ ), maxImageDimensionCube( maxImageDimensionCube_ ), maxImageArrayLayers( maxImageArrayLayers_ ), maxTexelBufferElements( maxTexelBufferElements_ ), maxUniformBufferRange( maxUniformBufferRange_ ), maxStorageBufferRange( maxStorageBufferRange_ ), maxPushConstantsSize( maxPushConstantsSize_ ), maxMemoryAllocationCount( maxMemoryAllocationCount_ ), maxSamplerAllocationCount( maxSamplerAllocationCount_ ), bufferImageGranularity( bufferImageGranularity_ ), sparseAddressSpaceSize( sparseAddressSpaceSize_ ), maxBoundDescriptorSets( maxBoundDescriptorSets_ ), maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ ), maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ ), maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ ), maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ ), maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ ), maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ ), maxPerStageResources( maxPerStageResources_ ), maxDescriptorSetSamplers( maxDescriptorSetSamplers_ ), maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ ), maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ ), maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ ), maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ ), maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ ), maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ ), maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ ), maxVertexInputAttributes( maxVertexInputAttributes_ 
), maxVertexInputBindings( maxVertexInputBindings_ ), maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ ), maxVertexInputBindingStride( maxVertexInputBindingStride_ ), maxVertexOutputComponents( maxVertexOutputComponents_ ), maxTessellationGenerationLevel( maxTessellationGenerationLevel_ ), maxTessellationPatchSize( maxTessellationPatchSize_ ), maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ ), maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ ), maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ ), maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ ), maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ ), maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ ), maxGeometryShaderInvocations( maxGeometryShaderInvocations_ ), maxGeometryInputComponents( maxGeometryInputComponents_ ), maxGeometryOutputComponents( maxGeometryOutputComponents_ ), maxGeometryOutputVertices( maxGeometryOutputVertices_ ), maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ ), maxFragmentInputComponents( maxFragmentInputComponents_ ), maxFragmentOutputAttachments( maxFragmentOutputAttachments_ ), maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ ), maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ ), maxComputeSharedMemorySize( maxComputeSharedMemorySize_ ), maxComputeWorkGroupCount( maxComputeWorkGroupCount_ ), maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ ), maxComputeWorkGroupSize( maxComputeWorkGroupSize_ ), subPixelPrecisionBits( subPixelPrecisionBits_ ), subTexelPrecisionBits( subTexelPrecisionBits_ ), mipmapPrecisionBits( mipmapPrecisionBits_ ), maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ ), maxDrawIndirectCount( maxDrawIndirectCount_ ), maxSamplerLodBias( maxSamplerLodBias_ ), maxSamplerAnisotropy( maxSamplerAnisotropy_ ), maxViewports( maxViewports_ ), maxViewportDimensions( maxViewportDimensions_ ), viewportBoundsRange( viewportBoundsRange_ ), viewportSubPixelBits( viewportSubPixelBits_ ), minMemoryMapAlignment( minMemoryMapAlignment_ ), minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ ), minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ ), minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ ), minTexelOffset( minTexelOffset_ ), maxTexelOffset( maxTexelOffset_ ), minTexelGatherOffset( minTexelGatherOffset_ ), maxTexelGatherOffset( maxTexelGatherOffset_ ), minInterpolationOffset( minInterpolationOffset_ ), maxInterpolationOffset( maxInterpolationOffset_ ), subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ ), maxFramebufferWidth( maxFramebufferWidth_ ), maxFramebufferHeight( maxFramebufferHeight_ ), maxFramebufferLayers( maxFramebufferLayers_ ), framebufferColorSampleCounts( framebufferColorSampleCounts_ ), framebufferDepthSampleCounts( framebufferDepthSampleCounts_ ), framebufferStencilSampleCounts( framebufferStencilSampleCounts_ ), framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ ), maxColorAttachments( maxColorAttachments_ ), sampledImageColorSampleCounts( sampledImageColorSampleCounts_ ), sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ ), sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ ), sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ ), 
storageImageSampleCounts( storageImageSampleCounts_ ), maxSampleMaskWords( maxSampleMaskWords_ ), timestampComputeAndGraphics( timestampComputeAndGraphics_ ), timestampPeriod( timestampPeriod_ ), maxClipDistances( maxClipDistances_ ), maxCullDistances( maxCullDistances_ ), maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ ), discreteQueuePriorities( discreteQueuePriorities_ ), pointSizeRange( pointSizeRange_ ), lineWidthRange( lineWidthRange_ ), pointSizeGranularity( pointSizeGranularity_ ), lineWidthGranularity( lineWidthGranularity_ ), strictLines( strictLines_ ), standardSampleLocations( standardSampleLocations_ ), optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ ), optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ ), nonCoherentAtomSize( nonCoherentAtomSize_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceLimits ) ); + return *this; + } + + + operator VkPhysicalDeviceLimits const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceLimits const& ) const = default; +#else + bool operator==( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( maxImageDimension1D == rhs.maxImageDimension1D ) + && ( maxImageDimension2D == rhs.maxImageDimension2D ) + && ( maxImageDimension3D == rhs.maxImageDimension3D ) + && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) + && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) + && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) + && ( maxUniformBufferRange == rhs.maxUniformBufferRange ) + && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) + && ( maxPushConstantsSize == rhs.maxPushConstantsSize ) + && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) + && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) + && ( bufferImageGranularity == rhs.bufferImageGranularity ) + && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) + && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) + && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) + && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) + && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) + && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) + && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) + && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) + && ( maxPerStageResources == rhs.maxPerStageResources ) + && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) + && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) + && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) + && ( 
maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) + && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) + && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) + && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) + && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) + && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) + && ( maxVertexInputBindings == rhs.maxVertexInputBindings ) + && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) + && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) + && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) + && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) + && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) + && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) + && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) + && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) + && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) + && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) + && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) + && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) + && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) + && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) + && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) + && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) + && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) + && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) + && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) + && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) + && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) + && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) + && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) + && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) + && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) + && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) + && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) + && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) + && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) + && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) + && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) + && ( maxViewports == rhs.maxViewports ) + && ( maxViewportDimensions == rhs.maxViewportDimensions ) + && ( viewportBoundsRange == rhs.viewportBoundsRange ) + && ( viewportSubPixelBits == rhs.viewportSubPixelBits ) + && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) + && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) + && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) + && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) + && ( minTexelOffset == rhs.minTexelOffset ) + && ( maxTexelOffset == rhs.maxTexelOffset ) + && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) + && ( 
maxTexelGatherOffset == rhs.maxTexelGatherOffset ) + && ( minInterpolationOffset == rhs.minInterpolationOffset ) + && ( maxInterpolationOffset == rhs.maxInterpolationOffset ) + && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) + && ( maxFramebufferWidth == rhs.maxFramebufferWidth ) + && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) + && ( maxFramebufferLayers == rhs.maxFramebufferLayers ) + && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) + && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) + && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) + && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) + && ( maxColorAttachments == rhs.maxColorAttachments ) + && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) + && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) + && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) + && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) + && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) + && ( maxSampleMaskWords == rhs.maxSampleMaskWords ) + && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) + && ( timestampPeriod == rhs.timestampPeriod ) + && ( maxClipDistances == rhs.maxClipDistances ) + && ( maxCullDistances == rhs.maxCullDistances ) + && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) + && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) + && ( pointSizeRange == rhs.pointSizeRange ) + && ( lineWidthRange == rhs.lineWidthRange ) + && ( pointSizeGranularity == rhs.pointSizeGranularity ) + && ( lineWidthGranularity == rhs.lineWidthGranularity ) + && ( strictLines == rhs.strictLines ) + && ( standardSampleLocations == rhs.standardSampleLocations ) + && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) + && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) + && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); + } + + bool operator!=( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + uint32_t maxImageDimension1D = {}; + uint32_t maxImageDimension2D = {}; + uint32_t maxImageDimension3D = {}; + uint32_t maxImageDimensionCube = {}; + uint32_t maxImageArrayLayers = {}; + uint32_t maxTexelBufferElements = {}; + uint32_t maxUniformBufferRange = {}; + uint32_t maxStorageBufferRange = {}; + uint32_t maxPushConstantsSize = {}; + uint32_t maxMemoryAllocationCount = {}; + uint32_t maxSamplerAllocationCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {}; + uint32_t maxBoundDescriptorSets = {}; + uint32_t maxPerStageDescriptorSamplers = {}; + uint32_t maxPerStageDescriptorUniformBuffers = {}; + uint32_t maxPerStageDescriptorStorageBuffers = {}; + uint32_t maxPerStageDescriptorSampledImages = {}; + uint32_t maxPerStageDescriptorStorageImages = {}; + uint32_t maxPerStageDescriptorInputAttachments = {}; + uint32_t maxPerStageResources = {}; + uint32_t maxDescriptorSetSamplers = {}; + uint32_t maxDescriptorSetUniformBuffers = {}; + uint32_t maxDescriptorSetUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetStorageBuffers = {}; + uint32_t maxDescriptorSetStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetSampledImages = {}; + uint32_t 
maxDescriptorSetStorageImages = {}; + uint32_t maxDescriptorSetInputAttachments = {}; + uint32_t maxVertexInputAttributes = {}; + uint32_t maxVertexInputBindings = {}; + uint32_t maxVertexInputAttributeOffset = {}; + uint32_t maxVertexInputBindingStride = {}; + uint32_t maxVertexOutputComponents = {}; + uint32_t maxTessellationGenerationLevel = {}; + uint32_t maxTessellationPatchSize = {}; + uint32_t maxTessellationControlPerVertexInputComponents = {}; + uint32_t maxTessellationControlPerVertexOutputComponents = {}; + uint32_t maxTessellationControlPerPatchOutputComponents = {}; + uint32_t maxTessellationControlTotalOutputComponents = {}; + uint32_t maxTessellationEvaluationInputComponents = {}; + uint32_t maxTessellationEvaluationOutputComponents = {}; + uint32_t maxGeometryShaderInvocations = {}; + uint32_t maxGeometryInputComponents = {}; + uint32_t maxGeometryOutputComponents = {}; + uint32_t maxGeometryOutputVertices = {}; + uint32_t maxGeometryTotalOutputComponents = {}; + uint32_t maxFragmentInputComponents = {}; + uint32_t maxFragmentOutputAttachments = {}; + uint32_t maxFragmentDualSrcAttachments = {}; + uint32_t maxFragmentCombinedOutputResources = {}; + uint32_t maxComputeSharedMemorySize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupCount = {}; + uint32_t maxComputeWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupSize = {}; + uint32_t subPixelPrecisionBits = {}; + uint32_t subTexelPrecisionBits = {}; + uint32_t mipmapPrecisionBits = {}; + uint32_t maxDrawIndexedIndexValue = {}; + uint32_t maxDrawIndirectCount = {}; + float maxSamplerLodBias = {}; + float maxSamplerAnisotropy = {}; + uint32_t maxViewports = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxViewportDimensions = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D viewportBoundsRange = {}; + uint32_t viewportSubPixelBits = {}; + size_t minMemoryMapAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {}; + int32_t minTexelOffset = {}; + uint32_t maxTexelOffset = {}; + int32_t minTexelGatherOffset = {}; + uint32_t maxTexelGatherOffset = {}; + float minInterpolationOffset = {}; + float maxInterpolationOffset = {}; + uint32_t subPixelInterpolationOffsetBits = {}; + uint32_t maxFramebufferWidth = {}; + uint32_t maxFramebufferHeight = {}; + uint32_t maxFramebufferLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {}; + uint32_t maxColorAttachments = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {}; + uint32_t maxSampleMaskWords = {}; + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {}; + float timestampPeriod = {}; + uint32_t maxClipDistances = {}; + uint32_t maxCullDistances = {}; + uint32_t maxCombinedClipAndCullDistances = {}; + uint32_t discreteQueuePriorities = {}; + 
VULKAN_HPP_NAMESPACE::ArrayWrapper1D pointSizeRange = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lineWidthRange = {}; + float pointSizeGranularity = {}; + float lineWidthGranularity = {}; + VULKAN_HPP_NAMESPACE::Bool32 strictLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {}; + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {}; + + }; + static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceSparseProperties + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties(VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {}) VULKAN_HPP_NOEXCEPT + : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ ), residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ ), residencyStandard3DBlockShape( residencyStandard3DBlockShape_ ), residencyAlignedMipSize( residencyAlignedMipSize_ ), residencyNonResidentStrict( residencyNonResidentStrict_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceSparseProperties ) ); + return *this; + } + + + operator VkPhysicalDeviceSparseProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSparseProperties const& ) const = default; +#else + bool operator==( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) + && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) + && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) + && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) + && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict ); + } + + bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 
residencyAlignedMipSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {}; + + }; + static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceProperties + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties(uint32_t apiVersion_ = {}, uint32_t driverVersion_ = {}, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, std::array const& deviceName_ = {}, std::array const& pipelineCacheUUID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {}) VULKAN_HPP_NOEXCEPT + : apiVersion( apiVersion_ ), driverVersion( driverVersion_ ), vendorID( vendorID_ ), deviceID( deviceID_ ), deviceType( deviceType_ ), deviceName( deviceName_ ), pipelineCacheUUID( pipelineCacheUUID_ ), limits( limits_ ), sparseProperties( sparseProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceProperties ) ); + return *this; + } + + + operator VkPhysicalDeviceProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceProperties const& ) const = default; +#else + bool operator==( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( apiVersion == rhs.apiVersion ) + && ( driverVersion == rhs.driverVersion ) + && ( vendorID == rhs.vendorID ) + && ( deviceID == rhs.deviceID ) + && ( deviceType == rhs.deviceType ) + && ( deviceName == rhs.deviceName ) + && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) + && ( limits == rhs.limits ) + && ( sparseProperties == rhs.sparseProperties ); + } + + bool operator!=( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + uint32_t apiVersion = {}; + uint32_t driverVersion = {}; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {}; + + }; + static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct PhysicalDeviceProperties2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}) VULKAN_HPP_NOEXCEPT
+    : properties( properties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( PhysicalDeviceProperties2 ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
+    }
+
+    operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceProperties2 const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( properties == rhs.properties );
+    }
+
+    bool operator!=( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceProperties2; + }; + using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; + + struct QueryPoolPerformanceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR(uint32_t queueFamilyIndex_ = {}, uint32_t counterIndexCount_ = {}, const uint32_t* pCounterIndices_ = {}) VULKAN_HPP_NOEXCEPT + : queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( counterIndexCount_ ), pCounterIndices( pCounterIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_ ) + : queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( static_cast( counterIndices_.size() ) ), pCounterIndices( counterIndices_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( QueryPoolPerformanceCreateInfoKHR ) ); + return *this; + } + + QueryPoolPerformanceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + counterIndexCount = counterIndexCount_; + return *this; + } + + QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t* pCounterIndices_ ) VULKAN_HPP_NOEXCEPT + { + pCounterIndices = pCounterIndices_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + QueryPoolPerformanceCreateInfoKHR & setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_ ) VULKAN_HPP_NOEXCEPT + { + counterIndexCount = static_cast( counterIndices_.size() ); + pCounterIndices = counterIndices_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkQueryPoolPerformanceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( QueryPoolPerformanceCreateInfoKHR const& ) const = default; +#else + bool operator==( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( queueFamilyIndex == rhs.queueFamilyIndex ) + && ( counterIndexCount == rhs.counterIndexCount ) + && ( 
pCounterIndices == rhs.pCounterIndices ); + } + + bool operator!=( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; + const void* pNext = {}; + uint32_t queueFamilyIndex = {}; + uint32_t counterIndexCount = {}; + const uint32_t* pCounterIndices = {}; + + }; + static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = QueryPoolPerformanceCreateInfoKHR; + }; + + struct QueueFamilyProperties + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyProperties(VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, uint32_t queueCount_ = {}, uint32_t timestampValidBits_ = {}, VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {}) VULKAN_HPP_NOEXCEPT + : queueFlags( queueFlags_ ), queueCount( queueCount_ ), timestampValidBits( timestampValidBits_ ), minImageTransferGranularity( minImageTransferGranularity_ ) + {} + + VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( QueueFamilyProperties ) ); + return *this; + } + + + operator VkQueueFamilyProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( QueueFamilyProperties const& ) const = default; +#else + bool operator==( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( queueFlags == rhs.queueFlags ) + && ( queueCount == rhs.queueCount ) + && ( timestampValidBits == rhs.timestampValidBits ) + && ( minImageTransferGranularity == rhs.minImageTransferGranularity ); + } + + bool operator!=( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {}; + uint32_t queueCount = {}; + uint32_t timestampValidBits = {}; + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {}; + + }; + static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct QueueFamilyProperties2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : queueFamilyProperties( queueFamilyProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( QueueFamilyProperties2 ) );
+      return *this;
+    }
+
+
+    operator VkQueueFamilyProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties2*>( this );
+    }
+
+    operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyProperties2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueueFamilyProperties2 const& ) const = default;
+#else
+    bool operator==( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( queueFamilyProperties == rhs.queueFamilyProperties );
+    }
+
+    bool operator!=( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
+
+  };
+  static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = QueueFamilyProperties2; + }; + using QueueFamilyProperties2KHR = QueueFamilyProperties2; + + struct PhysicalDeviceSparseImageFormatInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal) VULKAN_HPP_NOEXCEPT + : format( format_ ), type( type_ ), samples( samples_ ), usage( usage_ ), tiling( tiling_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) ); + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } + + + operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const& ) const = default; +#else + bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( format == rhs.format ) + && ( type == rhs.type ) + && ( samples == rhs.samples ) + && ( usage == rhs.usage ) + && ( tiling == rhs.tiling ); + } + + bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + 
+ public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + + }; + static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSparseImageFormatInfo2; + }; + using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; + + struct SparseImageFormatProperties2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {}) VULKAN_HPP_NOEXCEPT + : properties( properties_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SparseImageFormatProperties2 ) ); + return *this; + } + + + operator VkSparseImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageFormatProperties2 const& ) const = default; +#else + bool operator==( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( properties == rhs.properties ); + } + + bool operator!=( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {}; + + }; + static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SparseImageFormatProperties2; + }; + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; + + struct FramebufferMixedSamplesCombinationNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}) VULKAN_HPP_NOEXCEPT + : coverageReductionMode( coverageReductionMode_ ), rasterizationSamples( rasterizationSamples_ ), depthStencilSamples( depthStencilSamples_ ), colorSamples( colorSamples_ ) + {} + + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( FramebufferMixedSamplesCombinationNV ) ); + return *this; + } + + + operator VkFramebufferMixedSamplesCombinationNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( FramebufferMixedSamplesCombinationNV const& ) const = default; +#else + bool operator==( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( coverageReductionMode == rhs.coverageReductionMode ) + && ( rasterizationSamples == rhs.rasterizationSamples ) + && ( depthStencilSamples == rhs.depthStencilSamples ) + && ( colorSamples == rhs.colorSamples ); + } + + bool operator!=( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {}; + + }; + static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = FramebufferMixedSamplesCombinationNV; + }; + + struct SurfaceCapabilities2EXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}) VULKAN_HPP_NOEXCEPT + : minImageCount( minImageCount_ ), maxImageCount( maxImageCount_ ), currentExtent( currentExtent_ ), minImageExtent( minImageExtent_ ), maxImageExtent( maxImageExtent_ ), maxImageArrayLayers( maxImageArrayLayers_ ), supportedTransforms( supportedTransforms_ ), currentTransform( currentTransform_ ), supportedCompositeAlpha( supportedCompositeAlpha_ ), supportedUsageFlags( supportedUsageFlags_ ), supportedSurfaceCounters( supportedSurfaceCounters_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SurfaceCapabilities2EXT ) ); + return *this; + } + + + operator VkSurfaceCapabilities2EXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SurfaceCapabilities2EXT const& ) const = default; +#else + bool operator==( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( minImageCount == rhs.minImageCount ) + && ( maxImageCount == rhs.maxImageCount ) + && ( currentExtent == rhs.currentExtent ) + && ( minImageExtent == rhs.minImageExtent ) + && ( maxImageExtent == rhs.maxImageExtent ) + && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) + && ( supportedTransforms == rhs.supportedTransforms ) + && ( currentTransform == rhs.currentTransform ) + && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) + && ( supportedUsageFlags == rhs.supportedUsageFlags ) + && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters ); + } + + bool operator!=( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT; + void* pNext = {}; + uint32_t 
minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {}; + + }; + static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SurfaceCapabilities2EXT; + }; + + struct SurfaceCapabilitiesKHR + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT + : minImageCount( minImageCount_ ), maxImageCount( maxImageCount_ ), currentExtent( currentExtent_ ), minImageExtent( minImageExtent_ ), maxImageExtent( maxImageExtent_ ), maxImageArrayLayers( maxImageArrayLayers_ ), supportedTransforms( supportedTransforms_ ), currentTransform( currentTransform_ ), supportedCompositeAlpha( supportedCompositeAlpha_ ), supportedUsageFlags( supportedUsageFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SurfaceCapabilitiesKHR ) ); + return *this; + } + + + operator VkSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SurfaceCapabilitiesKHR const& ) const = default; +#else + bool operator==( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( minImageCount == rhs.minImageCount ) + && ( maxImageCount == rhs.maxImageCount ) + && ( currentExtent == rhs.currentExtent ) + && ( minImageExtent == rhs.minImageExtent ) + && ( maxImageExtent == rhs.maxImageExtent ) + && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) + && ( supportedTransforms == 
rhs.supportedTransforms ) + && ( currentTransform == rhs.currentTransform ) + && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) + && ( supportedUsageFlags == rhs.supportedUsageFlags ); + } + + bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + + }; + static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct SurfaceCapabilities2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}) VULKAN_HPP_NOEXCEPT + : surfaceCapabilities( surfaceCapabilities_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SurfaceCapabilities2KHR ) ); + return *this; + } + + + operator VkSurfaceCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SurfaceCapabilities2KHR const& ) const = default; +#else + bool operator==( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( surfaceCapabilities == rhs.surfaceCapabilities ); + } + + bool operator!=( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {}; + + }; + static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
+  {
+    using Type = SurfaceCapabilities2KHR;
+  };
+
+  struct SurfaceFormatKHR
+  {
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear) VULKAN_HPP_NOEXCEPT
+      : format( format_ ), colorSpace( colorSpace_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>( &rhs );
+      return *this;
+    }
+
+    SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceFormatKHR ) );
+      return *this;
+    }
+
+    operator VkSurfaceFormatKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormatKHR*>( this );
+    }
+
+    operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormatKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceFormatKHR const& ) const = default;
+#else
+    bool operator==( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( format == rhs.format )
+          && ( colorSpace == rhs.colorSpace );
+    }
+
+    bool operator!=( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+
+  };
+  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SurfaceFormat2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR(VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}) VULKAN_HPP_NOEXCEPT
+      : surfaceFormat( surfaceFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
+      return *this;
+    }
+
+    SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceFormat2KHR ) );
+      return *this;
+    }
+
+    operator VkSurfaceFormat2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormat2KHR*>( this );
+    }
+
+    operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormat2KHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceFormat2KHR const& ) const = default;
+#else
+    bool operator==( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceFormat == rhs.surfaceFormat );
+    }
+
+    bool operator!=( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
+
+  };
+  static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceFormat2KHR>
+  {
+    using Type = SurfaceFormat2KHR;
+  };
+
+  struct PhysicalDeviceToolPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT(std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const& name_ = {}, std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const& version_ = {}, VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = {}, std::array<char, VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const& layer_ = {}) VULKAN_HPP_NOEXCEPT
+      : name( name_ ), version( version_ ), purposes( purposes_ ), description( description_ ), layer( layer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceToolPropertiesEXT( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceToolPropertiesEXT & operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceToolPropertiesEXT & operator=( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceToolPropertiesEXT ) );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceToolPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceToolPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceToolPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( name == rhs.name )
+          && ( version == rhs.version )
+          && ( purposes == rhs.purposes )
+          && ( description == rhs.description )
+          && ( layer == rhs.layer );
+    }
+
+    bool operator!=( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolPropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version = {};
+    VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceToolPropertiesEXT>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceToolPropertiesEXT; + }; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueDevice = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class PhysicalDevice + { + public: + using CType = VkPhysicalDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice; + + public: + VULKAN_HPP_CONSTEXPR PhysicalDevice() VULKAN_HPP_NOEXCEPT + : m_physicalDevice(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice( physicalDevice ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = physicalDevice; + return *this; + } +#endif + + PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevice const& ) const = default; +#else + bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice == rhs.m_physicalDevice; + } + + bool operator!=(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice != rhs.m_physicalDevice; + } + + bool operator<(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice < rhs.m_physicalDevice; + } +#endif + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT + template + VULKAN_HPP_NODISCARD Result acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + template + VULKAN_HPP_NODISCARD Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createDevice( const DeviceCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + 
VULKAN_HPP_NODISCARD Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceLayerProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + template + VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy const &counters, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, 
ArrayProxy const &counters, Allocator const& vectorAllocator, Dispatch const &d ) const; + template , typename PerformanceCounterDescriptionKHRAllocator = std::allocator, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , typename PerformanceCounterDescriptionKHRAllocator = std::allocator, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PerformanceCounterKHRAllocator, typename B2 = PerformanceCounterDescriptionKHRAllocator, typename std::enable_if < std::is_same::value && std::is_same::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & countersAllocator, PerformanceCounterDescriptionKHRAllocator & counterDescriptionsAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getDisplayPlaneCapabilities2KHR( const 
DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getCooperativeMatrixPropertiesNV(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + template + Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB* dfb, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlanePropertiesKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPropertiesKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD 
typename ResultValueType>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getFeatures2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getFeatures2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void 
getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename 
ResultValueType::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getMemoryProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getMemoryProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& 
vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + StructureChain getProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint32_t getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getQueueFamilyProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + std::vector getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getQueueFamilyProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename 
std::enable_if::value, int>::type = 0> + std::vector getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getQueueFamilyProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + std::vector getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getQueueFamilyProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getQueueFamilyProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolPropertiesEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const 
VULKAN_HPP_NOEXCEPT; +#else + template + Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT + template + Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice == VK_NULL_HANDLE; + } + + private: + VkPhysicalDevice m_physicalDevice; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct DeviceGroupDeviceCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo(uint32_t physicalDeviceCount_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ = {}) VULKAN_HPP_NOEXCEPT + : physicalDeviceCount( physicalDeviceCount_ ), pPhysicalDevices( pPhysicalDevices_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & physicalDevices_ ) + : physicalDeviceCount( static_cast( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceGroupDeviceCreateInfo ) ); + return *this; + } + + DeviceGroupDeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT + { + physicalDeviceCount = physicalDeviceCount_; + return *this; + } + + DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT + { + pPhysicalDevices = pPhysicalDevices_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceGroupDeviceCreateInfo & setPhysicalDevices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT + { + physicalDeviceCount = static_cast( physicalDevices_.size() ); + pPhysicalDevices = physicalDevices_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkDeviceGroupDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGroupDeviceCreateInfo const& ) const = default; +#else + bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( physicalDeviceCount == rhs.physicalDeviceCount ) + && ( pPhysicalDevices == rhs.pPhysicalDevices ); + } + + bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
!operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; + const void* pNext = {}; + uint32_t physicalDeviceCount = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices = {}; + + }; + static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceGroupDeviceCreateInfo; + }; + using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; + + struct DeviceGroupPresentInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR(uint32_t swapchainCount_ = {}, const uint32_t* pDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal) VULKAN_HPP_NOEXCEPT + : swapchainCount( swapchainCount_ ), pDeviceMasks( pDeviceMasks_ ), mode( mode_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) + : swapchainCount( static_cast( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceGroupPresentInfoKHR ) ); + return *this; + } + + DeviceGroupPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t* pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceMasks = pDeviceMasks_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( deviceMasks_.size() ); + pDeviceMasks = deviceMasks_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + + operator VkDeviceGroupPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGroupPresentInfoKHR const& ) const = default; +#else + bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchainCount == rhs.swapchainCount ) + && ( pDeviceMasks == rhs.pDeviceMasks ) + && ( mode == rhs.mode ); + } + + bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; + const void* pNext = {}; + uint32_t swapchainCount = {}; + const uint32_t* pDeviceMasks = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal; + + }; + static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceGroupPresentInfoKHR; + }; + + struct DeviceGroupRenderPassBeginInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo(uint32_t deviceMask_ = {}, uint32_t deviceRenderAreaCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ = {}) VULKAN_HPP_NOEXCEPT + : deviceMask( deviceMask_ ), deviceRenderAreaCount( deviceRenderAreaCount_ ), pDeviceRenderAreas( pDeviceRenderAreas_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_ ) + : deviceMask( deviceMask_ ), deviceRenderAreaCount( static_cast( deviceRenderAreas_.size() ) ), pDeviceRenderAreas( deviceRenderAreas_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceGroupRenderPassBeginInfo ) ); + return *this; + } + + DeviceGroupRenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } + + DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceRenderAreaCount = deviceRenderAreaCount_; + return *this; + } + + DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceRenderAreas = 
pDeviceRenderAreas_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + deviceRenderAreaCount = static_cast( deviceRenderAreas_.size() ); + pDeviceRenderAreas = deviceRenderAreas_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkDeviceGroupRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGroupRenderPassBeginInfo const& ) const = default; +#else + bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceMask == rhs.deviceMask ) + && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) + && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); + } + + bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; + const void* pNext = {}; + uint32_t deviceMask = {}; + uint32_t deviceRenderAreaCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas = {}; + + }; + static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceGroupRenderPassBeginInfo; + }; + using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; + + struct DeviceGroupSubmitInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo(uint32_t waitSemaphoreCount_ = {}, const uint32_t* pWaitSemaphoreDeviceIndices_ = {}, uint32_t commandBufferCount_ = {}, const uint32_t* pCommandBufferDeviceMasks_ = {}, uint32_t signalSemaphoreCount_ = {}, const uint32_t* pSignalSemaphoreDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ), commandBufferCount( commandBufferCount_ ), pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphoreDeviceIndices_.size() ) ), pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ), commandBufferCount( static_cast( commandBufferDeviceMasks_.size() ) ), pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ), signalSemaphoreCount( static_cast( signalSemaphoreDeviceIndices_.size() ) ), pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceGroupSubmitInfo ) ); + return *this; + } + + DeviceGroupSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphoreDeviceIndices_.size() ); + pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + 
commandBufferCount = commandBufferCount_; + return *this; + } + + DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceGroupSubmitInfo & setCommandBufferDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = static_cast( commandBufferDeviceMasks_.size() ); + pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphoreDeviceIndices_.size() ); + pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkDeviceGroupSubmitInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGroupSubmitInfo const& ) const = default; +#else + bool operator==( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) + && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) + && ( commandBufferCount == rhs.commandBufferCount ) + && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) + && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) + && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); + } + + bool operator!=( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo; + const void* pNext = {}; + uint32_t waitSemaphoreCount = {}; + const uint32_t* pWaitSemaphoreDeviceIndices = {}; + uint32_t commandBufferCount = {}; + const uint32_t* pCommandBufferDeviceMasks = {}; + uint32_t signalSemaphoreCount = {}; + const uint32_t* pSignalSemaphoreDeviceIndices = {}; + + }; + static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceGroupSubmitInfo; + }; + using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; + + struct DeviceGroupSwapchainCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}) VULKAN_HPP_NOEXCEPT + : modes( modes_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHR ) ); + return *this; + } + + DeviceGroupSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT + { + modes = modes_; + return *this; + } + + + operator VkDeviceGroupSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const& ) const = default; +#else + bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( modes == rhs.modes ); + } + + bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; + + }; + static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceGroupSwapchainCreateInfoKHR; + }; + + struct DeviceMemoryOverallocationCreateInfoAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD(VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault) VULKAN_HPP_NOEXCEPT + : overallocationBehavior( overallocationBehavior_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceMemoryOverallocationCreateInfoAMD ) ); + return *this; + } + + DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT + { + overallocationBehavior = overallocationBehavior_; + return *this; + } + + + operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const& ) const = default; +#else + bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( overallocationBehavior == rhs.overallocationBehavior ); + } + + bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; + + }; + static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceMemoryOverallocationCreateInfoAMD; + }; + + struct DevicePrivateDataCreateInfoEXT + { + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfoEXT(uint32_t privateDataSlotRequestCount_ = {}) VULKAN_HPP_NOEXCEPT + : privateDataSlotRequestCount( privateDataSlotRequestCount_ ) + {} + + VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfoEXT( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DevicePrivateDataCreateInfoEXT( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DevicePrivateDataCreateInfoEXT & operator=( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DevicePrivateDataCreateInfoEXT & operator=( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DevicePrivateDataCreateInfoEXT ) ); + return *this; + } + + DevicePrivateDataCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DevicePrivateDataCreateInfoEXT & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT + { + privateDataSlotRequestCount = privateDataSlotRequestCount_; + return *this; + } + + + operator VkDevicePrivateDataCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDevicePrivateDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DevicePrivateDataCreateInfoEXT const& ) const = default; +#else + bool operator==( DevicePrivateDataCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount ); + } + + bool operator!=( DevicePrivateDataCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfoEXT; + const void* pNext = {}; + uint32_t privateDataSlotRequestCount = {}; + + }; + static_assert( sizeof( DevicePrivateDataCreateInfoEXT ) == sizeof( VkDevicePrivateDataCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DevicePrivateDataCreateInfoEXT; + }; + + struct DeviceQueueGlobalPriorityCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT(VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow) VULKAN_HPP_NOEXCEPT + : globalPriority( globalPriority_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DeviceQueueGlobalPriorityCreateInfoEXT & operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceQueueGlobalPriorityCreateInfoEXT & operator=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) ); + return *this; + } + + DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT + { + globalPriority = globalPriority_; + return *this; + } + + + operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceQueueGlobalPriorityCreateInfoEXT const& ) const = default; +#else + bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( globalPriority == rhs.globalPriority ); + } + + bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow; + + }; + static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DeviceQueueGlobalPriorityCreateInfoEXT; + }; + +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + struct DirectFBSurfaceCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, IDirectFB* dfb_ = {}, IDirectFBSurface* surface_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), dfb( dfb_ ), surface( surface_ ) + {} + + VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DirectFBSurfaceCreateInfoEXT ) ); + return *this; + } + + DirectFBSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DirectFBSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB* dfb_ ) VULKAN_HPP_NOEXCEPT + { + dfb = dfb_; + return *this; + } + + DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface* surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } + + + operator VkDirectFBSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DirectFBSurfaceCreateInfoEXT const& ) const = default; +#else + bool operator==( DirectFBSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( dfb == rhs.dfb ) + && ( surface == rhs.surface ); + } + + bool operator!=( DirectFBSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {}; + IDirectFB* dfb = {}; + IDirectFBSurface* surface = {}; + + }; + static_assert( sizeof( DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
 );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT>
+  {
+    using Type = DirectFBSurfaceCreateInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  struct DispatchIndirectCommand
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand(uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ ), z( z_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>( &rhs );
+      return *this;
+    }
+
+    DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( DispatchIndirectCommand ) );
+      return *this;
+    }
+
+    DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+
+
+    operator VkDispatchIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDispatchIndirectCommand*>( this );
+    }
+
+    operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDispatchIndirectCommand*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DispatchIndirectCommand const& ) const = default;
+#else
+    bool operator==( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z );
+    }
+
+    bool operator!=( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t x = {};
+    uint32_t y = {};
+    uint32_t z = {};
+
+  };
+  static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayNativeHdrSurfaceCapabilitiesAMD
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {}) VULKAN_HPP_NOEXCEPT
+    : localDimmingSupport( localDimmingSupport_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs );
+      return *this;
+    }
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) );
+      return *this;
+    }
+
+
+    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+    }
+
+    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const& ) const = default;
+#else
+    bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( localDimmingSupport == rhs.localDimmingSupport );
+    }
+
+    bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {};
+
+  };
+  static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = DisplayNativeHdrSurfaceCapabilitiesAMD; + }; + + struct DisplayPresentInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR(VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}) VULKAN_HPP_NOEXCEPT + : srcRect( srcRect_ ), dstRect( dstRect_ ), persistent( persistent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplayPresentInfoKHR ) ); + return *this; + } + + DisplayPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT + { + srcRect = srcRect_; + return *this; + } + + DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT + { + dstRect = dstRect_; + return *this; + } + + DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT + { + persistent = persistent_; + return *this; + } + + + operator VkDisplayPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayPresentInfoKHR const& ) const = default; +#else + bool operator==( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcRect == rhs.srcRect ) + && ( dstRect == rhs.dstRect ) + && ( persistent == rhs.persistent ); + } + + bool operator!=( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; + VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; + + }; + static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayPresentInfoKHR; + }; + + struct DisplaySurfaceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, uint32_t planeIndex_ = {}, uint32_t planeStackIndex_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = {}, VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), displayMode( displayMode_ ), planeIndex( planeIndex_ ), planeStackIndex( planeStackIndex_ ), transform( transform_ ), globalAlpha( globalAlpha_ ), alphaMode( alphaMode_ ), imageExtent( imageExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) ); + return *this; + } + + DisplaySurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT + { + displayMode = displayMode_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeIndex = planeIndex_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeStackIndex = planeStackIndex_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT + { + globalAlpha = globalAlpha_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT + { + alphaMode = alphaMode_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + + operator VkDisplaySurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplaySurfaceCreateInfoKHR const& ) const = default; +#else + bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( displayMode == rhs.displayMode ) + && ( planeIndex == rhs.planeIndex ) + && ( planeStackIndex == rhs.planeStackIndex ) + && ( transform == rhs.transform ) + && ( globalAlpha == rhs.globalAlpha ) + && ( alphaMode == rhs.alphaMode ) + && ( imageExtent == rhs.imageExtent ); + } + + bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + uint32_t planeIndex = {}; + uint32_t planeStackIndex = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + float globalAlpha = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + + }; + static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DisplaySurfaceCreateInfoKHR; + }; + + struct DrawIndexedIndirectCommand + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand(uint32_t indexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstIndex_ = {}, int32_t vertexOffset_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT + : indexCount( indexCount_ ), instanceCount( instanceCount_ ), firstIndex( firstIndex_ ), vertexOffset( vertexOffset_ ), firstInstance( firstInstance_ ) + {} + + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DrawIndexedIndirectCommand ) ); + return *this; + } + + DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT + { + firstIndex = firstIndex_; + return *this; + } + + DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } + + DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = 
firstInstance_; + return *this; + } + + + operator VkDrawIndexedIndirectCommand const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DrawIndexedIndirectCommand const& ) const = default; +#else + bool operator==( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( indexCount == rhs.indexCount ) + && ( instanceCount == rhs.instanceCount ) + && ( firstIndex == rhs.firstIndex ) + && ( vertexOffset == rhs.vertexOffset ) + && ( firstInstance == rhs.firstInstance ); + } + + bool operator!=( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + uint32_t indexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstIndex = {}; + int32_t vertexOffset = {}; + uint32_t firstInstance = {}; + + }; + static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct DrawIndirectCommand + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndirectCommand(uint32_t vertexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstVertex_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT + : vertexCount( vertexCount_ ), instanceCount( instanceCount_ ), firstVertex( firstVertex_ ), firstInstance( firstInstance_ ) + {} + + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DrawIndirectCommand ) ); + return *this; + } + + DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return *this; + } + + + operator VkDrawIndirectCommand const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DrawIndirectCommand const& ) const = default; +#else + bool operator==( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( vertexCount == rhs.vertexCount ) + && ( instanceCount == rhs.instanceCount ) + && ( firstVertex == rhs.firstVertex ) + && ( firstInstance == rhs.firstInstance ); + } + + bool operator!=( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } 
+#endif + + + + public: + uint32_t vertexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstVertex = {}; + uint32_t firstInstance = {}; + + }; + static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct DrawMeshTasksIndirectCommandNV + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV(uint32_t taskCount_ = {}, uint32_t firstTask_ = {}) VULKAN_HPP_NOEXCEPT + : taskCount( taskCount_ ), firstTask( firstTask_ ) + {} + + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( DrawMeshTasksIndirectCommandNV ) ); + return *this; + } + + DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT + { + taskCount = taskCount_; + return *this; + } + + DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT + { + firstTask = firstTask_; + return *this; + } + + + operator VkDrawMeshTasksIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DrawMeshTasksIndirectCommandNV const& ) const = default; +#else + bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( taskCount == rhs.taskCount ) + && ( firstTask == rhs.firstTask ); + } + + bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + uint32_t taskCount = {}; + uint32_t firstTask = {}; + + }; + static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct DrmFormatModifierPropertiesEXT
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+    : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( DrmFormatModifierPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+    operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DrmFormatModifierPropertiesEXT const& ) const = default;
+#else
+    bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
+          && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
+    }
+
+    bool operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint64_t drmFormatModifier = {};
+    uint32_t drmFormatModifierPlaneCount = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
+
+  };
+  static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DrmFormatModifierPropertiesListEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT(uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DrmFormatModifierPropertiesListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const & drmFormatModifierProperties_ )
+    : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) ), pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>( &rhs );
+      return *this;
+    }
+
+    DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( DrmFormatModifierPropertiesListEXT ) );
+      return *this;
+    }
+
+
+    operator VkDrmFormatModifierPropertiesListEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>( this );
+    }
+
+    operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DrmFormatModifierPropertiesListEXT const& ) const = default;
+#else
+    bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
+          && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
+    }
+
+    bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
+    void* pNext = {};
+    uint32_t drmFormatModifierCount = {};
+    VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties = {};
+
+  };
+  static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrmFormatModifierPropertiesListEXT>::value, "struct wrapper is not a standard layout!"
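  // Illustrative sketch (assumes a vk::PhysicalDevice named physicalDevice and
  // VK_EXT_image_drm_format_modifier): the list is chained into FormatProperties2 and
  // filled with the usual two-call idiom.
  //   vk::DrmFormatModifierPropertiesListEXT modifierList{};
  //   vk::FormatProperties2 formatProps{};
  //   formatProps.pNext = &modifierList;                                  // count only
  //   physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm, &formatProps );
  //   std::vector<vk::DrmFormatModifierPropertiesEXT> modifiers( modifierList.drmFormatModifierCount );
  //   modifierList.pDrmFormatModifierProperties = modifiers.data();
  //   physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm, &formatProps );  // fill data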
); + + template <> + struct CppType + { + using Type = DrmFormatModifierPropertiesListEXT; + }; + + struct ExportFenceCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExportFenceCreateInfo ) ); + return *this; + } + + ExportFenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + + operator VkExportFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportFenceCreateInfo const& ) const = default; +#else + bool operator==( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {}; + + }; + static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExportFenceCreateInfo; + }; + using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportFenceWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) ); + return *this; + } + + ExportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + + operator VkExportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportFenceWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ) + && ( name == rhs.name ); + } + + bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; + const void* pNext = {}; + const SECURITY_ATTRIBUTES* pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + + }; + static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
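  // Illustrative sketch (Windows only; assumes a vk::Device named device, placeholder
  // access mask and name): the Win32 info is chained behind ExportFenceCreateInfo.
  //   vk::ExportFenceCreateInfo exportInfo( vk::ExternalFenceHandleTypeFlagBits::eOpaqueWin32 );
  //   vk::ExportFenceWin32HandleInfoKHR win32Info( nullptr, GENERIC_ALL, L"example-fence" );
  //   exportInfo.pNext = &win32Info;
  //   vk::FenceCreateInfo fenceInfo{};
  //   fenceInfo.pNext = &exportInfo;
  //   vk::Fence fence = device.createFence( fenceInfo );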
); + + template <> + struct CppType + { + using Type = ExportFenceWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ExportMemoryAllocateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExportMemoryAllocateInfo ) ); + return *this; + } + + ExportMemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + + operator VkExportMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportMemoryAllocateInfo const& ) const = default; +#else + bool operator==( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + + }; + static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
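  // Illustrative sketch (assumes a vk::Device named device plus allocationSize and
  // memoryTypeIndex chosen elsewhere): exporting is opted into at allocation time.
  //   vk::ExportMemoryAllocateInfo exportInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   vk::MemoryAllocateInfo allocInfo( allocationSize, memoryTypeIndex );
  //   allocInfo.pNext = &exportInfo;
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );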
); + + template <> + struct CppType + { + using Type = ExportMemoryAllocateInfo; + }; + using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; + + struct ExportMemoryAllocateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExportMemoryAllocateInfoNV ) ); + return *this; + } + + ExportMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + + operator VkExportMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportMemoryAllocateInfoNV const& ) const = default; +#else + bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; + + }; + static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExportMemoryAllocateInfoNV; + }; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportMemoryWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) ); + return *this; + } + + ExportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + + operator VkExportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportMemoryWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ) + && ( name == rhs.name ); + } + + bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; + const void* pNext = {}; + const SECURITY_ATTRIBUTES* pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + + }; + static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExportMemoryWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportMemoryWin32HandleInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) ); + return *this; + } + + ExportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + + operator VkExportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportMemoryWin32HandleInfoNV const& ) const = default; +#else + bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ); + } + + bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; + const void* pNext = {}; + const SECURITY_ATTRIBUTES* pAttributes = {}; + DWORD dwAccess = {}; + + }; + static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExportMemoryWin32HandleInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ExportSemaphoreCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExportSemaphoreCreateInfo ) ); + return *this; + } + + ExportSemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + + operator VkExportSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportSemaphoreCreateInfo const& ) const = default; +#else + bool operator==( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {}; + + }; + static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
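  // Illustrative sketch (assumes a vk::Device named device): a semaphore created with
  // this chain can later be exported via VK_KHR_external_semaphore_fd / _win32.
  //   vk::ExportSemaphoreCreateInfo exportInfo( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
  //   vk::SemaphoreCreateInfo semaphoreInfo{};
  //   semaphoreInfo.pNext = &exportInfo;
  //   vk::Semaphore semaphore = device.createSemaphore( semaphoreInfo );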
); + + template <> + struct CppType + { + using Type = ExportSemaphoreCreateInfo; + }; + using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportSemaphoreWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) ); + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + + operator VkExportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ) + && ( name == rhs.name ); + } + + bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; + const void* pNext = {}; + const SECURITY_ATTRIBUTES* pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + + }; + static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExportSemaphoreWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct ExternalFormatANDROID + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFormatANDROID(uint64_t externalFormat_ = {}) VULKAN_HPP_NOEXCEPT + : externalFormat( externalFormat_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExternalFormatANDROID ) ); + return *this; + } + + ExternalFormatANDROID & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT + { + externalFormat = externalFormat_; + return *this; + } + + + operator VkExternalFormatANDROID const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExternalFormatANDROID const& ) const = default; +#else + bool operator==( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( externalFormat == rhs.externalFormat ); + } + + bool operator!=( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID; + void* pNext = {}; + uint64_t externalFormat = {}; + + }; + static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
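  // Illustrative sketch (Android only; assumes ahbFormatProps is a previously queried
  // vk::AndroidHardwareBufferFormatPropertiesANDROID and imageCreateInfo is an otherwise
  // filled vk::ImageCreateInfo): the external format is forwarded at image creation.
  //   vk::ExternalFormatANDROID externalFormat( ahbFormatProps.externalFormat );
  //   imageCreateInfo.format = vk::Format::eUndefined;
  //   imageCreateInfo.pNext  = &externalFormat;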
); + + template <> + struct CppType + { + using Type = ExternalFormatANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct ExternalImageFormatProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}) VULKAN_HPP_NOEXCEPT + : externalMemoryProperties( externalMemoryProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExternalImageFormatProperties ) ); + return *this; + } + + + operator VkExternalImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExternalImageFormatProperties const& ) const = default; +#else + bool operator==( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( externalMemoryProperties == rhs.externalMemoryProperties ); + } + + bool operator!=( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; + + }; + static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExternalImageFormatProperties; + }; + using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; + + struct ExternalMemoryBufferCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExternalMemoryBufferCreateInfo ) ); + return *this; + } + + ExternalMemoryBufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + + operator VkExternalMemoryBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExternalMemoryBufferCreateInfo const& ) const = default; +#else + bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + + }; + static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExternalMemoryBufferCreateInfo; + }; + using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; + + struct ExternalMemoryImageCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExternalMemoryImageCreateInfo ) ); + return *this; + } + + ExternalMemoryImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + + operator VkExternalMemoryImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExternalMemoryImageCreateInfo const& ) const = default; +#else + bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + + }; + static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
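  // Illustrative sketch (assumes a vk::Device named device and an otherwise filled
  // vk::ImageCreateInfo named imageInfo): images meant for external sharing declare the
  // handle types at creation time.
  //   vk::ExternalMemoryImageCreateInfo externalInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   imageInfo.pNext = &externalInfo;
  //   vk::Image image = device.createImage( imageInfo );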
); + + template <> + struct CppType + { + using Type = ExternalMemoryImageCreateInfo; + }; + using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; + + struct ExternalMemoryImageCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) ); + return *this; + } + + ExternalMemoryImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + + operator VkExternalMemoryImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExternalMemoryImageCreateInfoNV const& ) const = default; +#else + bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; + + }; + static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ExternalMemoryImageCreateInfoNV; + }; + + struct FilterCubicImageViewImageFormatPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}) VULKAN_HPP_NOEXCEPT + : filterCubic( filterCubic_ ), filterCubicMinmax( filterCubicMinmax_ ) + {} + + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) ); + return *this; + } + + + operator VkFilterCubicImageViewImageFormatPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const& ) const = default; +#else + bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( filterCubic == rhs.filterCubic ) + && ( filterCubicMinmax == rhs.filterCubicMinmax ); + } + + bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {}; + + }; + static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = FilterCubicImageViewImageFormatPropertiesEXT; + }; + + struct FramebufferAttachmentImageInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layerCount_ = {}, uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ ) + {} + + VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, uint32_t width_, uint32_t height_, uint32_t layerCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) + : flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( static_cast( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( FramebufferAttachmentImageInfo ) ); + return *this; + } + + FramebufferAttachmentImageInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = viewFormatCount_; + return *this; + } + + FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT + { + pViewFormats = pViewFormats_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + FramebufferAttachmentImageInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = 
static_cast( viewFormats_.size() ); + pViewFormats = viewFormats_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkFramebufferAttachmentImageInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( FramebufferAttachmentImageInfo const& ) const = default; +#else + bool operator==( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( usage == rhs.usage ) + && ( width == rhs.width ) + && ( height == rhs.height ) + && ( layerCount == rhs.layerCount ) + && ( viewFormatCount == rhs.viewFormatCount ) + && ( pViewFormats == rhs.pViewFormats ); + } + + bool operator!=( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layerCount = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {}; + + }; + static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = FramebufferAttachmentImageInfo; + }; + using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; + + struct FramebufferAttachmentsCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo(uint32_t attachmentImageInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ = {}) VULKAN_HPP_NOEXCEPT + : attachmentImageInfoCount( attachmentImageInfoCount_ ), pAttachmentImageInfos( pAttachmentImageInfos_ ) + {} + + VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + FramebufferAttachmentsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentImageInfos_ ) + : attachmentImageInfoCount( static_cast( attachmentImageInfos_.size() ) ), pAttachmentImageInfos( attachmentImageInfos_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( FramebufferAttachmentsCreateInfo ) ); + return *this; + } 
+ + FramebufferAttachmentsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentImageInfoCount = attachmentImageInfoCount_; + return *this; + } + + FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT + { + pAttachmentImageInfos = pAttachmentImageInfos_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT + { + attachmentImageInfoCount = static_cast( attachmentImageInfos_.size() ); + pAttachmentImageInfos = attachmentImageInfos_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkFramebufferAttachmentsCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( FramebufferAttachmentsCreateInfo const& ) const = default; +#else + bool operator==( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) + && ( pAttachmentImageInfos == rhs.pAttachmentImageInfos ); + } + + bool operator!=( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo; + const void* pNext = {}; + uint32_t attachmentImageInfoCount = {}; + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos = {}; + + }; + static_assert( sizeof( FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
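  // Illustrative sketch (imageless framebuffer; assumes width, height, colorFormat and an
  // otherwise filled vk::FramebufferCreateInfo named framebufferInfo):
  //   vk::FramebufferAttachmentImageInfo attachmentImageInfo(
  //       {}, vk::ImageUsageFlagBits::eColorAttachment, width, height,
  //       /*layerCount*/ 1, /*viewFormatCount*/ 1, &colorFormat );
  //   vk::FramebufferAttachmentsCreateInfo attachmentsInfo( 1, &attachmentImageInfo );
  //   framebufferInfo.flags |= vk::FramebufferCreateFlagBits::eImageless;
  //   framebufferInfo.pNext  = &attachmentsInfo;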
); + + template <> + struct CppType + { + using Type = FramebufferAttachmentsCreateInfo; + }; + using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; + + struct GraphicsShaderGroupCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV(uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}) VULKAN_HPP_NOEXCEPT + : stageCount( stageCount_ ), pStages( pStages_ ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ ) + {} + + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + GraphicsShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {} ) + : stageCount( static_cast( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( GraphicsShaderGroupCreateInfoNV ) ); + return *this; + } + + GraphicsShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + GraphicsShaderGroupCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + GraphicsShaderGroupCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + GraphicsShaderGroupCreateInfoNV & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + { + pVertexInputState = pVertexInputState_; + return *this; + } + + GraphicsShaderGroupCreateInfoNV & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT + { + pTessellationState = pTessellationState_; + return *this; + } + + + operator VkGraphicsShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + 
return *reinterpret_cast( this ); + } + + operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( GraphicsShaderGroupCreateInfoNV const& ) const = default; +#else + bool operator==( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( stageCount == rhs.stageCount ) + && ( pStages == rhs.pStages ) + && ( pVertexInputState == rhs.pVertexInputState ) + && ( pTessellationState == rhs.pTessellationState ); + } + + bool operator!=( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; + const void* pNext = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {}; + + }; + static_assert( sizeof( GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = GraphicsShaderGroupCreateInfoNV; + }; + + struct GraphicsPipelineShaderGroupsCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ = {}, uint32_t pipelineCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ = {}) VULKAN_HPP_NOEXCEPT + : groupCount( groupCount_ ), pGroups( pGroups_ ), pipelineCount( pipelineCount_ ), pPipelines( pPipelines_ ) + {} + + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + GraphicsPipelineShaderGroupsCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ = {} ) + : groupCount( static_cast( groups_.size() ) ), pGroups( groups_.data() ), pipelineCount( static_cast( pipelines_.size() ) ), pPipelines( pipelines_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) ); + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + 
pNext = pNext_; + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCount = pipelineCount_; + return *this; + } + + GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ ) VULKAN_HPP_NOEXCEPT + { + pPipelines = pPipelines_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCount = static_cast( pipelines_.size() ); + pPipelines = pipelines_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const& ) const = default; +#else + bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( groupCount == rhs.groupCount ) + && ( pGroups == rhs.pGroups ) + && ( pipelineCount == rhs.pipelineCount ) + && ( pPipelines == rhs.pPipelines ); + } + + bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + const void* pNext = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups = {}; + uint32_t pipelineCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines = {}; + + }; + static_assert( sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = GraphicsPipelineShaderGroupsCreateInfoNV; + }; + + struct HeadlessSurfaceCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( HeadlessSurfaceCreateInfoEXT ) ); + return *this; + } + + HeadlessSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + + operator VkHeadlessSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( HeadlessSurfaceCreateInfoEXT const& ) const = default; +#else + bool operator==( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {}; + + }; + static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = HeadlessSurfaceCreateInfoEXT; + }; + +#ifdef VK_USE_PLATFORM_IOS_MVK + struct IOSSurfaceCreateInfoMVK + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, const void* pView_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), pView( pView_ ) + {} + + VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( IOSSurfaceCreateInfoMVK ) ); + return *this; + } + + IOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + IOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT + { + pView = pView_; + return *this; + } + + + operator VkIOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( IOSSurfaceCreateInfoMVK const& ) const = default; +#else + bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pView == rhs.pView ); + } + + bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {}; + const void* pView = {}; + + }; + static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = IOSSurfaceCreateInfoMVK; + }; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + + struct ImageDrmFormatModifierExplicitCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ = {}) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), pPlaneLayouts( pPlaneLayouts_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & planeLayouts_ ) + : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( static_cast( planeLayouts_.size() ) ), pPlaneLayouts( planeLayouts_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) ); + return *this; + } + + ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; + return *this; + } + + ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pPlaneLayouts = pPlaneLayouts_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierPlaneCount = static_cast( planeLayouts_.size() ); + pPlaneLayouts = planeLayouts_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( 
ImageDrmFormatModifierExplicitCreateInfoEXT const& ) const = default; +#else + bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( drmFormatModifier == rhs.drmFormatModifier ) + && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) + && ( pPlaneLayouts == rhs.pPlaneLayouts ); + } + + bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + const void* pNext = {}; + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts = {}; + + }; + static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierExplicitCreateInfoEXT; + }; + + struct ImageDrmFormatModifierListCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT(uint32_t drmFormatModifierCount_ = {}, const uint64_t* pDrmFormatModifiers_ = {}) VULKAN_HPP_NOEXCEPT + : drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifiers( pDrmFormatModifiers_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_ ) + : drmFormatModifierCount( static_cast( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageDrmFormatModifierListCreateInfoEXT ) ); + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierCount = drmFormatModifierCount_; + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + { + pDrmFormatModifiers = pDrmFormatModifiers_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + 
ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierCount = static_cast( drmFormatModifiers_.size() ); + pDrmFormatModifiers = drmFormatModifiers_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkImageDrmFormatModifierListCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const& ) const = default; +#else + bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) + && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); + } + + bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; + const void* pNext = {}; + uint32_t drmFormatModifierCount = {}; + const uint64_t* pDrmFormatModifiers = {}; + + }; + static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierListCreateInfoEXT; + }; + + struct ImageFormatListCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo(uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {}) VULKAN_HPP_NOEXCEPT + : viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) + : viewFormatCount( static_cast( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageFormatListCreateInfo ) ); + return *this; + } + + ImageFormatListCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = viewFormatCount_; + return *this; + } + + 
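// [Editorial note, not part of the generated header or of this patch]
// The enhanced-mode setViewFormats overload a few lines below fills both
// viewFormatCount and pViewFormats from a single ArrayProxyNoTemporaries
// argument, so a caller can pass a named (lvalue) container directly, e.g.:
//   std::vector<vk::Format> formats = { vk::Format::eR8G8B8A8Unorm, vk::Format::eR8G8B8A8Srgb };
//   auto formatList = vk::ImageFormatListCreateInfo{}.setViewFormats( formats );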
ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT + { + pViewFormats = pViewFormats_; + return *this; + } + +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ImageFormatListCreateInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = static_cast( viewFormats_.size() ); + pViewFormats = viewFormats_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkImageFormatListCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageFormatListCreateInfo const& ) const = default; +#else + bool operator==( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( viewFormatCount == rhs.viewFormatCount ) + && ( pViewFormats == rhs.pViewFormats ); + } + + bool operator!=( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo; + const void* pNext = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {}; + + }; + static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageFormatListCreateInfo; + }; + using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; + +#ifdef VK_USE_PLATFORM_FUCHSIA + struct ImagePipeSurfaceCreateInfoFUCHSIA + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, zx_handle_t imagePipeHandle_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), imagePipeHandle( imagePipeHandle_ ) + {} + + VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) ); + return *this; + } + + ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t 
imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT + { + imagePipeHandle = imagePipeHandle_; + return *this; + } + + + operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const& ) const = default; +#else + bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {}; + zx_handle_t imagePipeHandle = {}; + + }; + static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImagePipeSurfaceCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct ImagePlaneMemoryRequirementsInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor) VULKAN_HPP_NOEXCEPT + : planeAspect( planeAspect_ ) + {} + + VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImagePlaneMemoryRequirementsInfo ) ); + return *this; + } + + ImagePlaneMemoryRequirementsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } + + + operator VkImagePlaneMemoryRequirementsInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImagePlaneMemoryRequirementsInfo const& ) const = default; +#else + bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + 
return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( planeAspect == rhs.planeAspect ); + } + + bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + + }; + static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImagePlaneMemoryRequirementsInfo; + }; + using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; + + struct ImageStencilUsageCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}) VULKAN_HPP_NOEXCEPT + : stencilUsage( stencilUsage_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageStencilUsageCreateInfo ) ); + return *this; + } + + ImageStencilUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT + { + stencilUsage = stencilUsage_; + return *this; + } + + + operator VkImageStencilUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageStencilUsageCreateInfo const& ) const = default; +#else + bool operator==( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( stencilUsage == rhs.stencilUsage ); + } + + bool operator!=( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {}; + + }; + static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageStencilUsageCreateInfo; + }; + using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; + + struct ImageSwapchainCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}) VULKAN_HPP_NOEXCEPT + : swapchain( swapchain_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageSwapchainCreateInfoKHR ) ); + return *this; + } + + ImageSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + + operator VkImageSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageSwapchainCreateInfoKHR const& ) const = default; +#else + bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchain == rhs.swapchain ); + } + + bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + + }; + static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImageSwapchainCreateInfoKHR; + }; + + struct ImageViewASTCDecodeModeEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined) VULKAN_HPP_NOEXCEPT + : decodeMode( decodeMode_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImageViewASTCDecodeModeEXT ) ); + return *this; + } + + ImageViewASTCDecodeModeEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT + { + decodeMode = decodeMode_; + return *this; + } + + + operator VkImageViewASTCDecodeModeEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageViewASTCDecodeModeEXT const& ) const = default; +#else + bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( decodeMode == rhs.decodeMode ); + } + + bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined; + + }; + static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
+  {
+    using Type = ImageViewASTCDecodeModeEXT;
+  };
+
+  struct ImageViewUsageCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}) VULKAN_HPP_NOEXCEPT
+    : usage( usage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageViewUsageCreateInfo ) );
+      return *this;
+    }
+
+    ImageViewUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+
+    operator VkImageViewUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewUsageCreateInfo*>( this );
+    }
+
+    operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewUsageCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageViewUsageCreateInfo const& ) const = default;
+#else
+    bool operator==( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( usage == rhs.usage );
+    }
+
+    bool operator!=( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+
+  };
+  static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = ImageViewUsageCreateInfo; + }; + using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct ImportAndroidHardwareBufferInfoANDROID + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID(struct AHardwareBuffer* buffer_ = {}) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImportAndroidHardwareBufferInfoANDROID ) ); + return *this; + } + + ImportAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer* buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + + operator VkImportAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const& ) const = default; +#else + bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + const void* pNext = {}; + struct AHardwareBuffer* buffer = {}; + + }; + static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportAndroidHardwareBufferInfoANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct ImportMemoryFdInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ), fd( fd_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImportMemoryFdInfoKHR ) ); + return *this; + } + + ImportMemoryFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } + + + operator VkImportMemoryFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportMemoryFdInfoKHR const& ) const = default; +#else + bool operator==( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( fd == rhs.fd ); + } + + bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + + }; + static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportMemoryFdInfoKHR; + }; + + struct ImportMemoryHostPointerInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void* pHostPointer_ = {}) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ), pHostPointer( pHostPointer_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImportMemoryHostPointerInfoEXT ) ); + return *this; + } + + ImportMemoryHostPointerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryHostPointerInfoEXT & setPHostPointer( void* pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } + + + operator VkImportMemoryHostPointerInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportMemoryHostPointerInfoEXT const& ) const = default; +#else + bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( pHostPointer == rhs.pHostPointer ); + } + + bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + void* pHostPointer = {}; + + }; + static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportMemoryHostPointerInfoEXT; + }; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportMemoryWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ), handle( handle_ ), name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) ); + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + + operator VkImportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportMemoryWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ) + && ( name == rhs.name ); + } + + bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + + }; + static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportMemoryWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportMemoryWin32HandleInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, HANDLE handle_ = {}) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ), handle( handle_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) ); + return *this; + } + + ImportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + + operator VkImportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportMemoryWin32HandleInfoNV const& ) const = default; +#else + bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ); + } + + bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {}; + HANDLE handle = {}; + + }; + static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportMemoryWin32HandleInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct InputAttachmentAspectReference { - VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {}, - uint32_t inputAttachmentIndex_ = {}, - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT - : subpass( subpass_ ) - , inputAttachmentIndex( inputAttachmentIndex_ ) - , aspectMask( aspectMask_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference(uint32_t subpass_ = {}, uint32_t inputAttachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT + : subpass( subpass_ ), inputAttachmentIndex( inputAttachmentIndex_ ), aspectMask( aspectMask_ ) {} + VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; + InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - InputAttachmentAspectReference& operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT + InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( InputAttachmentAspectReference ) ); return *this; } @@ -38926,6 +59661,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkInputAttachmentAspectReference const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -38936,6 +59672,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( InputAttachmentAspectReference const& ) const = default; +#else bool operator==( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( subpass == rhs.subpass ) @@ -38947,45 +59687,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t subpass = {}; uint32_t inputAttachmentIndex = {}; VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + }; static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; struct InstanceCreateInfo { - VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, - const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ = {}, - uint32_t enabledLayerCount_ = {}, - const char* const* ppEnabledLayerNames_ = {}, - uint32_t enabledExtensionCount_ = {}, - const char* const* ppEnabledExtensionNames_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pApplicationInfo( pApplicationInfo_ ) - , enabledLayerCount( enabledLayerCount_ ) - , ppEnabledLayerNames( ppEnabledLayerNames_ ) - , enabledExtensionCount( enabledExtensionCount_ ) - , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR InstanceCreateInfo(VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ = {}, uint32_t enabledLayerCount_ = {}, const char* const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char* const * ppEnabledExtensionNames_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ ) {} - VULKAN_HPP_NAMESPACE::InstanceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) - offsetof( InstanceCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {} ) + : flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( InstanceCreateInfo ) ); return *this; } @@ -39013,24 +59761,43 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - InstanceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + InstanceCreateInfo & setPpEnabledLayerNames( const char* const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT { ppEnabledLayerNames = 
ppEnabledLayerNames_; return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + InstanceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); + ppEnabledLayerNames = pEnabledLayerNames_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT { enabledExtensionCount = enabledExtensionCount_; return *this; } - InstanceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + InstanceCreateInfo & setPpEnabledExtensionNames( const char* const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT { ppEnabledExtensionNames = ppEnabledExtensionNames_; return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + InstanceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); + ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkInstanceCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -39041,6 +59808,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( InstanceCreateInfo const& ) const = default; +#else bool operator==( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -39057,6 +59828,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo; @@ -39064,95 +59838,48 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {}; const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo = {}; uint32_t enabledLayerCount = {}; - const char* const* ppEnabledLayerNames = {}; + const char* const * ppEnabledLayerNames = {}; uint32_t enabledExtensionCount = {}; - const char* const* ppEnabledExtensionNames = {}; + const char* const * ppEnabledExtensionNames = {}; + }; static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
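  // Illustrative sketch, not part of the generated header: besides the raw pointer/count
  // setters, the regenerated InstanceCreateInfo adds ArrayProxyNoTemporaries based setters
  // (enhanced mode only) that fill the count and pointer together. Assuming `appInfo`,
  // `layers` and `extensions` (e.g. std::vector<const char*>) already exist:
  //
  //   vk::InstanceCreateInfo ci;
  //   ci.setPApplicationInfo( &appInfo )
  //     .setPEnabledLayerNames( layers )           // also sets enabledLayerCount
  //     .setPEnabledExtensionNames( extensions );  // also sets enabledExtensionCount
  //   vk::UniqueInstance instance = vk::createInstanceUnique( ci );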
);

-  struct LayerProperties
+  template <>
+  struct CppType<StructureType, StructureType::eInstanceCreateInfo>
   {
-    LayerProperties( std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layerName_ = {},
-                     uint32_t specVersion_ = {},
-                     uint32_t implementationVersion_ = {},
-                     std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {} ) VULKAN_HPP_NOEXCEPT
-      : layerName{}
-      , specVersion( specVersion_ )
-      , implementationVersion( implementationVersion_ )
-      , description{}
-    {
-      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE>::copy( layerName, layerName_ );
-      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
-    }
-
-    LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    LayerProperties& operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>(&rhs);
-      return *this;
-    }
-
-    operator VkLayerProperties const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkLayerProperties*>( this );
-    }
-
-    operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkLayerProperties*>( this );
-    }
-
-    bool operator==( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
-          && ( specVersion == rhs.specVersion )
-          && ( implementationVersion == rhs.implementationVersion )
-          && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 );
-    }
-
-    bool operator!=( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    char layerName[VK_MAX_EXTENSION_NAME_SIZE] = {};
-    uint32_t specVersion = {};
-    uint32_t implementationVersion = {};
-    char description[VK_MAX_DESCRIPTION_SIZE] = {};
+    using Type = InstanceCreateInfo;
   };
-  static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<LayerProperties>::value, "struct wrapper is not a standard layout!"
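  // Illustrative sketch, not part of the generated header: the CppType<> specializations
  // added throughout this diff map a StructureType enum value back to its wrapper struct,
  // which the library uses when validating structure chains. Assuming the default `vk`
  // namespace:
  //
  //   static_assert(
  //     std::is_same<vk::CppType<vk::StructureType, vk::StructureType::eInstanceCreateInfo>::Type,
  //                  vk::InstanceCreateInfo>::value,
  //     "CppType maps eInstanceCreateInfo to InstanceCreateInfo" );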
); #ifdef VK_USE_PLATFORM_MACOS_MVK - struct MacOSSurfaceCreateInfoMVK { - VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, - const void* pView_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pView( pView_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, const void* pView_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), pView( pView_ ) {} - VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & operator=( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK ) - offsetof( MacOSSurfaceCreateInfoMVK, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) ); return *this; } @@ -39174,6 +59901,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkMacOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -39184,6 +59912,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MacOSSurfaceCreateInfoMVK const& ) const = default; +#else bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -39196,124 +59928,54 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; const void* pNext = {}; VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {}; const void* pView = {}; + }; static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/

-  struct MappedMemoryRange
+  template <>
+  struct CppType<StructureType, StructureType::eMacosSurfaceCreateInfoMVK>
   {
-    VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
-                                            VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
-                                            VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
-      : memory( memory_ )
-      , offset( offset_ )
-      , size( size_ )
-    {}
-
-    VULKAN_HPP_NAMESPACE::MappedMemoryRange & operator=( VULKAN_HPP_NAMESPACE::MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MappedMemoryRange ) - offsetof( MappedMemoryRange, pNext ) );
-      return *this;
-    }
-
-    MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>(&rhs);
-      return *this;
-    }
-
-    MappedMemoryRange & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
-    {
-      memory = memory_;
-      return *this;
-    }
-
-    MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
-    {
-      size = size_;
-      return *this;
-    }
-
-    operator VkMappedMemoryRange const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkMappedMemoryRange*>( this );
-    }
-
-    operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkMappedMemoryRange*>( this );
-    }
-
-    bool operator==( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memory == rhs.memory )
-          && ( offset == rhs.offset )
-          && ( size == rhs.size );
-    }
-
-    bool operator!=( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    using Type = MacOSSurfaceCreateInfoMVK;
   };
-  static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MappedMemoryRange>::value, "struct wrapper is not a standard layout!"
); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ struct MemoryAllocateFlagsInfo { - VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, - uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , deviceMask( deviceMask_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo(VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), deviceMask( deviceMask_ ) {} - VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo ) - offsetof( MemoryAllocateFlagsInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryAllocateFlagsInfo ) ); return *this; } @@ -39335,6 +59997,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkMemoryAllocateFlagsInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -39345,6 +60008,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryAllocateFlagsInfo const& ) const = default; +#else bool operator==( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -39357,188 +60024,54 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo; const void* pNext = {}; VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {}; uint32_t deviceMask = {}; + }; static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
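  // Illustrative sketch, not part of the generated header: MemoryAllocateFlagsInfo is
  // chained into a MemoryAllocateInfo via pNext, e.g. to request a device address for the
  // allocation. `size` and `memoryTypeIndex` are assumed to come from an earlier
  // memory-requirements query:
  //
  //   vk::MemoryAllocateFlagsInfo flagsInfo( vk::MemoryAllocateFlagBits::eDeviceAddress );
  //   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
  //   allocInfo.setPNext( &flagsInfo );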
); - struct MemoryAllocateInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, - uint32_t memoryTypeIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : allocationSize( allocationSize_ ) - , memoryTypeIndex( memoryTypeIndex_ ) - {} - - VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo ) - offsetof( MemoryAllocateInfo, pNext ) ); - return *this; - } - - MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - MemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT - { - allocationSize = allocationSize_; - return *this; - } - - MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT - { - memoryTypeIndex = memoryTypeIndex_; - return *this; - } - - operator VkMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( allocationSize == rhs.allocationSize ) - && ( memoryTypeIndex == rhs.memoryTypeIndex ); - } - - bool operator!=( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; - uint32_t memoryTypeIndex = {}; + using Type = MemoryAllocateFlagsInfo; }; - static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct MemoryBarrier - { - VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT - : srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - {} - - VULKAN_HPP_NAMESPACE::MemoryBarrier & operator=( VULKAN_HPP_NAMESPACE::MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier ) - offsetof( MemoryBarrier, pNext ) ); - return *this; - } - - MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryBarrier& operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - srcAccessMask = srcAccessMask_; - return *this; - } - - MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - dstAccessMask = dstAccessMask_; - return *this; - } - - operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ); - } - - bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - }; - static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; struct MemoryDedicatedAllocateInfo { - VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - , buffer( buffer_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT + : image( image_ ), buffer( buffer_ ) {} - VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo ) - offsetof( MemoryDedicatedAllocateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryDedicatedAllocateInfo ) ); return *this; } @@ -39560,6 +60093,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkMemoryDedicatedAllocateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -39570,6 +60104,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryDedicatedAllocateInfo const& ) const = default; +#else bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -39582,41 +60120,58 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; const void* pNext = {}; VULKAN_HPP_NAMESPACE::Image image = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
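  // Illustrative sketch, not part of the generated header: MemoryDedicatedAllocateInfo is
  // another pNext extension of MemoryAllocateInfo, and vk::StructureChain keeps the chain
  // wired up automatically. `device`, `image`, `memReq` and `memoryTypeIndex` are assumed
  // to already exist:
  //
  //   vk::StructureChain<vk::MemoryAllocateInfo, vk::MemoryDedicatedAllocateInfo> chain(
  //     vk::MemoryAllocateInfo( memReq.size, memoryTypeIndex ),
  //     vk::MemoryDedicatedAllocateInfo( image ) );
  //   vk::UniqueDeviceMemory memory =
  //     device.allocateMemoryUnique( chain.get<vk::MemoryAllocateInfo>() );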
); + template <> + struct CppType + { + using Type = MemoryDedicatedAllocateInfo; + }; + using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; + struct MemoryDedicatedRequirements { - MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT - : prefersDedicatedAllocation( prefersDedicatedAllocation_ ) - , requiresDedicatedAllocation( requiresDedicatedAllocation_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements(VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT + : prefersDedicatedAllocation( prefersDedicatedAllocation_ ), requiresDedicatedAllocation( requiresDedicatedAllocation_ ) {} - VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements & operator=( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) - offsetof( MemoryDedicatedRequirements, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - MemoryDedicatedRequirements& operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryDedicatedRequirements ) ); + return *this; + } + + operator VkMemoryDedicatedRequirements const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -39627,6 +60182,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryDedicatedRequirements const& ) const = default; +#else bool operator==( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -39639,410 +60198,54 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {}; VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {}; + }; static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct MemoryFdPropertiesKHR + template <> + struct CppType { - MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryTypeBits( memoryTypeBits_ ) - {} - - VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR ) - offsetof( MemoryFdPropertiesKHR, pNext ) ); - return *this; - } - - MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryFdPropertiesKHR& operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMemoryFdPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryTypeBits == rhs.memoryTypeBits ); - } - - bool operator!=( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR; - void* pNext = {}; - uint32_t memoryTypeBits = {}; + using Type = MemoryDedicatedRequirements; }; - static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - - struct MemoryGetAndroidHardwareBufferInfoANDROID - { - VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT - : memory( memory_ ) - {} - - VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID ) - offsetof( MemoryGetAndroidHardwareBufferInfoANDROID, pNext ) ); - return *this; - } - - MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT - { - memory = memory_; - return *this; - } - - operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ); - } - - bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const 
VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - }; - static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - struct MemoryGetFdInfoKHR - { - VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : memory( memory_ ) - , handleType( handleType_ ) - {} - - VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR ) - offsetof( MemoryGetFdInfoKHR, pNext ) ); - return *this; - } - - MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - MemoryGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT - { - memory = memory_; - return *this; - } - - MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - operator VkMemoryGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - }; - static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - struct MemoryGetWin32HandleInfoKHR - { - VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : memory( memory_ ) - , handleType( handleType_ ) - {} - - VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR ) - offsetof( MemoryGetWin32HandleInfoKHR, pNext ) ); - return *this; - } - - MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - MemoryGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT - { - memory = memory_; - return *this; - } - - MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - operator VkMemoryGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - }; - static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct MemoryHeap - { - MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : size( size_ ) - , flags( flags_ ) - {} - - MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryHeap& operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMemoryHeap const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( size == rhs.size ) - && ( flags == rhs.flags ); - } - - bool operator!=( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {}; - }; - static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct MemoryHostPointerPropertiesEXT - { - MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryTypeBits( memoryTypeBits_ ) - {} - - VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT ) - offsetof( MemoryHostPointerPropertiesEXT, pNext ) ); - return *this; - } - - MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMemoryHostPointerPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryTypeBits == rhs.memoryTypeBits ); - } - - bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; - void* pNext = {}; - uint32_t memoryTypeBits = {}; - }; - static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; struct MemoryOpaqueCaptureAddressAllocateInfo { - VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT - : opaqueCaptureAddress( opaqueCaptureAddress_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo(uint64_t opaqueCaptureAddress_ = {}) VULKAN_HPP_NOEXCEPT + : opaqueCaptureAddress( opaqueCaptureAddress_ ) {} - VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo ) - offsetof( MemoryOpaqueCaptureAddressAllocateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - MemoryOpaqueCaptureAddressAllocateInfo& operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) ); return *this; } @@ -40058,6 +60261,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkMemoryOpaqueCaptureAddressAllocateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -40068,6 +60272,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const& ) const = default; +#else bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -40079,35 +60287,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; const void* pNext = {}; uint64_t opaqueCaptureAddress = {}; + }; static_assert( sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
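  // Illustrative sketch, not part of the generated header: MemoryOpaqueCaptureAddressAllocateInfo
  // (Vulkan 1.2 buffer device address capture/replay) lets a replay allocate memory at a
  // previously captured opaque address. `capturedAddress` is assumed to have been recorded
  // in an earlier run via Device::getMemoryOpaqueCaptureAddress:
  //
  //   vk::MemoryOpaqueCaptureAddressAllocateInfo captureInfo( capturedAddress );
  //   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
  //   allocInfo.setPNext( &captureInfo );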
); + template <> + struct CppType + { + using Type = MemoryOpaqueCaptureAddressAllocateInfo; + }; + using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; + struct MemoryPriorityAllocateInfoEXT { - VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {} ) VULKAN_HPP_NOEXCEPT - : priority( priority_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT(float priority_ = {}) VULKAN_HPP_NOEXCEPT + : priority( priority_ ) {} - VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT ) - offsetof( MemoryPriorityAllocateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - MemoryPriorityAllocateInfoEXT& operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MemoryPriorityAllocateInfoEXT ) ); return *this; } @@ -40123,6 +60349,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkMemoryPriorityAllocateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -40133,6 +60360,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryPriorityAllocateInfoEXT const& ) const = default; +#else bool operator==( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -40144,246 +60375,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT; const void* pNext = {}; float priority = {}; + }; static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
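  // Illustrative sketch, not part of the generated header: MemoryPriorityAllocateInfoEXT
  // (VK_EXT_memory_priority) attaches a 0.0 to 1.0 priority hint to an allocation, again
  // through pNext. Assuming a vk::MemoryAllocateInfo named `allocInfo` already exists:
  //
  //   vk::MemoryPriorityAllocateInfoEXT priorityInfo( 1.0f );  // 1.0 = highest priority
  //   allocInfo.setPNext( &priorityInfo );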
); - struct MemoryRequirements + template <> + struct CppType { - MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, - uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT - : size( size_ ) - , alignment( alignment_ ) - , memoryTypeBits( memoryTypeBits_ ) - {} - - MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryRequirements& operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( size == rhs.size ) - && ( alignment == rhs.alignment ) - && ( memoryTypeBits == rhs.memoryTypeBits ); - } - - bool operator!=( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::DeviceSize alignment = {}; - uint32_t memoryTypeBits = {}; + using Type = MemoryPriorityAllocateInfoEXT; }; - static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct MemoryRequirements2 - { - MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryRequirements( memoryRequirements_ ) - {} - - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & operator=( VULKAN_HPP_NAMESPACE::MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements2 ) - offsetof( MemoryRequirements2, pNext ) ); - return *this; - } - - MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryRequirements2& operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryRequirements == rhs.memoryRequirements ); - } - - bool operator!=( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {}; - }; - static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct MemoryType - { - MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, - uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : propertyFlags( propertyFlags_ ) - , heapIndex( heapIndex_ ) - {} - - MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryType& operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMemoryType const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryType &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( propertyFlags == rhs.propertyFlags ) - && ( heapIndex == rhs.heapIndex ); - } - - bool operator!=( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {}; - uint32_t heapIndex = {}; - }; - static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - struct MemoryWin32HandlePropertiesKHR - { - MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryTypeBits( memoryTypeBits_ ) - {} - - VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR ) - offsetof( MemoryWin32HandlePropertiesKHR, pNext ) ); - return *this; - } - - MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryWin32HandlePropertiesKHR& operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMemoryWin32HandlePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryTypeBits == rhs.memoryTypeBits ); - } - - bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; - void* pNext = {}; - uint32_t memoryTypeBits = {}; - }; - static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_METAL_EXT - struct MetalSurfaceCreateInfoEXT { - VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, - const CAMetalLayer* pLayer_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pLayer( pLayer_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, const CAMetalLayer* pLayer_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), pLayer( pLayer_ ) {} - VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT ) - offsetof( MetalSurfaceCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - MetalSurfaceCreateInfoEXT& operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( MetalSurfaceCreateInfoEXT ) ); return *this; } @@ -40405,6 +60443,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkMetalSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -40415,6 +60454,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MetalSurfaceCreateInfoEXT const& ) const = default; +#else bool operator==( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -40427,981 +60470,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; const void* pNext = {}; VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; const CAMetalLayer* pLayer = {}; + }; static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = MetalSurfaceCreateInfoEXT; + }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct MultisamplePropertiesEXT - { - MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT - : maxSampleLocationGridSize( maxSampleLocationGridSize_ ) - {} - - VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT ) - offsetof( MultisamplePropertiesEXT, pNext ) ); - return *this; - } - - MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MultisamplePropertiesEXT& operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkMultisamplePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); - } - - bool operator!=( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; - }; - static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ObjectTableCreateInfoNVX - { - VULKAN_HPP_CONSTEXPR ObjectTableCreateInfoNVX( uint32_t objectCount_ = {}, - const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ = {}, - const uint32_t* pObjectEntryCounts_ = {}, - const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = {}, - uint32_t maxUniformBuffersPerDescriptor_ = {}, - uint32_t maxStorageBuffersPerDescriptor_ = {}, - uint32_t maxStorageImagesPerDescriptor_ = {}, - uint32_t maxSampledImagesPerDescriptor_ = {}, - uint32_t maxPipelineLayouts_ = {} ) VULKAN_HPP_NOEXCEPT - : objectCount( objectCount_ ) - , pObjectEntryTypes( pObjectEntryTypes_ ) - , pObjectEntryCounts( pObjectEntryCounts_ ) - , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ ) - , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ ) - , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ ) - , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ ) - , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ ) - , maxPipelineLayouts( maxPipelineLayouts_ ) - {} - - VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX & operator=( VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX ) - offsetof( ObjectTableCreateInfoNVX, pNext ) ); - return *this; - } - - ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTableCreateInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ObjectTableCreateInfoNVX & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT - { - objectCount = objectCount_; - return *this; - } - - ObjectTableCreateInfoNVX & setPObjectEntryTypes( const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ ) VULKAN_HPP_NOEXCEPT - { - pObjectEntryTypes = pObjectEntryTypes_; - return *this; - } - - ObjectTableCreateInfoNVX & setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ ) VULKAN_HPP_NOEXCEPT - { - pObjectEntryCounts = pObjectEntryCounts_; - return *this; - } - - ObjectTableCreateInfoNVX & setPObjectEntryUsageFlags( const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ ) VULKAN_HPP_NOEXCEPT - { - pObjectEntryUsageFlags = pObjectEntryUsageFlags_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ ) VULKAN_HPP_NOEXCEPT - { - maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ ) VULKAN_HPP_NOEXCEPT - { - maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ ) VULKAN_HPP_NOEXCEPT - { - maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ ) VULKAN_HPP_NOEXCEPT - { - maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ ) VULKAN_HPP_NOEXCEPT - { - 
maxPipelineLayouts = maxPipelineLayouts_; - return *this; - } - - operator VkObjectTableCreateInfoNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkObjectTableCreateInfoNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ObjectTableCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectCount == rhs.objectCount ) - && ( pObjectEntryTypes == rhs.pObjectEntryTypes ) - && ( pObjectEntryCounts == rhs.pObjectEntryCounts ) - && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags ) - && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor ) - && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor ) - && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor ) - && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor ) - && ( maxPipelineLayouts == rhs.maxPipelineLayouts ); - } - - bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eObjectTableCreateInfoNVX; - const void* pNext = {}; - uint32_t objectCount = {}; - const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes = {}; - const uint32_t* pObjectEntryCounts = {}; - const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags = {}; - uint32_t maxUniformBuffersPerDescriptor = {}; - uint32_t maxStorageBuffersPerDescriptor = {}; - uint32_t maxStorageImagesPerDescriptor = {}; - uint32_t maxSampledImagesPerDescriptor = {}; - uint32_t maxPipelineLayouts = {}; - }; - static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ObjectTableEntryNVX - { - VULKAN_HPP_CONSTEXPR ObjectTableEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - {} - - ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTableEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - ObjectTableEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - operator VkObjectTableEntryNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkObjectTableEntryNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ObjectTableEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( type == rhs.type ) - && ( flags == rhs.flags ); - } - - bool operator!=( ObjectTableEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; - }; - static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ObjectTableDescriptorSetEntryNVX - { - VULKAN_HPP_CONSTEXPR ObjectTableDescriptorSetEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , pipelineLayout( pipelineLayout_ ) - , descriptorSet( descriptorSet_ ) - {} - - explicit ObjectTableDescriptorSetEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = {} ) - : type( objectTableEntryNVX.type ) - , flags( objectTableEntryNVX.flags ) - , pipelineLayout( pipelineLayout_ ) - , descriptorSet( descriptorSet_ ) - {} - - ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTableDescriptorSetEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - ObjectTableDescriptorSetEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ObjectTableDescriptorSetEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT - { - pipelineLayout = pipelineLayout_; - return *this; - } - - ObjectTableDescriptorSetEntryNVX & setDescriptorSet( 
VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ ) VULKAN_HPP_NOEXCEPT - { - descriptorSet = descriptorSet_; - return *this; - } - - operator VkObjectTableDescriptorSetEntryNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkObjectTableDescriptorSetEntryNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( pipelineLayout == rhs.pipelineLayout ) - && ( descriptorSet == rhs.descriptorSet ); - } - - bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet = {}; - }; - static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ObjectTableIndexBufferEntryNVX - { - VULKAN_HPP_CONSTEXPR ObjectTableIndexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , buffer( buffer_ ) - , indexType( indexType_ ) - {} - - explicit ObjectTableIndexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) - : type( objectTableEntryNVX.type ) - , flags( objectTableEntryNVX.flags ) - , buffer( buffer_ ) - , indexType( indexType_ ) - {} - - ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTableIndexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - ObjectTableIndexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ObjectTableIndexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT - { - buffer = buffer_; - return *this; - } - - ObjectTableIndexBufferEntryNVX & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT - { - indexType = indexType_; - return *this; - } - - operator VkObjectTableIndexBufferEntryNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkObjectTableIndexBufferEntryNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( buffer == rhs.buffer ) - && ( 
indexType == rhs.indexType ); - } - - bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; - }; - static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ObjectTablePipelineEntryNVX - { - VULKAN_HPP_CONSTEXPR ObjectTablePipelineEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , pipeline( pipeline_ ) - {} - - explicit ObjectTablePipelineEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, - VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) - : type( objectTableEntryNVX.type ) - , flags( objectTableEntryNVX.flags ) - , pipeline( pipeline_ ) - {} - - ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTablePipelineEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - ObjectTablePipelineEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ObjectTablePipelineEntryNVX & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT - { - pipeline = pipeline_; - return *this; - } - - operator VkObjectTablePipelineEntryNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkObjectTablePipelineEntryNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( pipeline == rhs.pipeline ); - } - - bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - }; - static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ObjectTablePushConstantEntryNVX - { - VULKAN_HPP_CONSTEXPR ObjectTablePushConstantEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , pipelineLayout( pipelineLayout_ ) - , stageFlags( stageFlags_ ) - {} - - explicit ObjectTablePushConstantEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {} ) - : type( objectTableEntryNVX.type ) - , flags( objectTableEntryNVX.flags ) - , pipelineLayout( pipelineLayout_ ) - , stageFlags( stageFlags_ ) - {} - - ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTablePushConstantEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - ObjectTablePushConstantEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ObjectTablePushConstantEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT - { - pipelineLayout = pipelineLayout_; - return *this; - } - - ObjectTablePushConstantEntryNVX & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT - { - stageFlags = stageFlags_; - return *this; - } - - operator VkObjectTablePushConstantEntryNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkObjectTablePushConstantEntryNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( pipelineLayout == rhs.pipelineLayout ) - && ( stageFlags == rhs.stageFlags ); - } - - bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; - }; - static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ObjectTableVertexBufferEntryNVX - { - VULKAN_HPP_CONSTEXPR ObjectTableVertexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , buffer( buffer_ ) - {} - - explicit ObjectTableVertexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) - : type( objectTableEntryNVX.type ) - , flags( objectTableEntryNVX.flags ) - , buffer( buffer_ ) - {} - - ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ObjectTableVertexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - ObjectTableVertexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ObjectTableVertexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT - { - buffer = buffer_; - return *this; - } - - operator VkObjectTableVertexBufferEntryNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkObjectTableVertexBufferEntryNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( buffer == rhs.buffer ); - } - - bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; - VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - }; - static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  struct PastPresentationTimingGOOGLE
-  {
-    PastPresentationTimingGOOGLE( uint32_t presentID_ = {},
-                                  uint64_t desiredPresentTime_ = {},
-                                  uint64_t actualPresentTime_ = {},
-                                  uint64_t earliestPresentTime_ = {},
-                                  uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT
-      : presentID( presentID_ )
-      , desiredPresentTime( desiredPresentTime_ )
-      , actualPresentTime( actualPresentTime_ )
-      , earliestPresentTime( earliestPresentTime_ )
-      , presentMargin( presentMargin_ )
-    {}
-
-    PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PastPresentationTimingGOOGLE& operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const *>(&rhs);
-      return *this;
-    }
-
-    operator VkPastPresentationTimingGOOGLE const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>( this );
-    }
-
-    operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPastPresentationTimingGOOGLE*>( this );
-    }
-
-    bool operator==( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( presentID == rhs.presentID )
-          && ( desiredPresentTime == rhs.desiredPresentTime )
-          && ( actualPresentTime == rhs.actualPresentTime )
-          && ( earliestPresentTime == rhs.earliestPresentTime )
-          && ( presentMargin == rhs.presentMargin );
-    }
-
-    bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    uint32_t presentID = {};
-    uint64_t desiredPresentTime = {};
-    uint64_t actualPresentTime = {};
-    uint64_t earliestPresentTime = {};
-    uint64_t presentMargin = {};
-  };
-  static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!"
);
-
-  struct PerformanceConfigurationAcquireInfoINTEL
-  {
-    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) VULKAN_HPP_NOEXCEPT
-      : type( type_ )
-    {}
-
-    VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL ) - offsetof( PerformanceConfigurationAcquireInfoINTEL, pNext ) );
-      return *this;
-    }
-
-    PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PerformanceConfigurationAcquireInfoINTEL& operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>(&rhs);
-      return *this;
-    }
-
-    PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
-    {
-      type = type_;
-      return *this;
-    }
-
-    operator VkPerformanceConfigurationAcquireInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
-    }
-
-    operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
-    }
-
-    bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( type == rhs.type );
-    }
-
-    bool operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
-  };
-  static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!"
); - - struct PerformanceCounterDescriptionKHR - { - PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, - std::array const& name_ = {}, - std::array const& category_ = {}, - std::array const& description_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , name{} - , category{} - , description{} - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( name, name_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( category, category_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( description, description_ ); - } - - VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR ) - offsetof( PerformanceCounterDescriptionKHR, pNext ) ); - return *this; - } - - PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceCounterDescriptionKHR& operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkPerformanceCounterDescriptionKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( memcmp( name, rhs.name, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 ) - && ( memcmp( category, rhs.category, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 ) - && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 ); - } - - bool operator!=( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {}; - char name[VK_MAX_DESCRIPTION_SIZE] = {}; - char category[VK_MAX_DESCRIPTION_SIZE] = {}; - char description[VK_MAX_DESCRIPTION_SIZE] = {}; - }; - static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PerformanceCounterKHR - { - PerformanceCounterKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, - VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, - VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, - std::array const& uuid_ = {} ) VULKAN_HPP_NOEXCEPT - : unit( unit_ ) - , scope( scope_ ) - , storage( storage_ ) - , uuid{} - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( uuid, uuid_ ); - } - - VULKAN_HPP_NAMESPACE::PerformanceCounterKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR ) - offsetof( PerformanceCounterKHR, pNext ) ); - return *this; - } - - PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceCounterKHR& operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkPerformanceCounterKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( unit == rhs.unit ) - && ( scope == rhs.scope ) - && ( storage == rhs.storage ) - && ( memcmp( uuid, rhs.uuid, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ); - } - - bool operator!=( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric; - VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer; - VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32; - uint8_t uuid[VK_UUID_SIZE] = {}; - }; - static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - union PerformanceCounterResultKHR { - PerformanceCounterResultKHR( int32_t int32_ = {} ) + PerformanceCounterResultKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const& rhs ) VULKAN_HPP_NOEXCEPT { - int32 = int32_; + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) ); } + PerformanceCounterResultKHR( int32_t int32_ = {} ) + : int32( int32_ ) + {} + PerformanceCounterResultKHR( int64_t int64_ ) - { - int64 = int64_; - } + : int64( int64_ ) + {} PerformanceCounterResultKHR( uint32_t uint32_ ) - { - uint32 = uint32_; - } + : uint32( uint32_ ) + {} PerformanceCounterResultKHR( uint64_t uint64_ ) - { - uint64 = uint64_; - } + : uint64( uint64_ ) + {} PerformanceCounterResultKHR( float float32_ ) - { - float32 = float32_; - } + : float32( float32_ ) + {} PerformanceCounterResultKHR( double float64_ ) - { - float64 = float64_; - } + : float64( float64_ ) + {} PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT { @@ -41441,7 +60560,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) ); + memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) ); return *this; } @@ -41463,176 +60582,33 @@ namespace VULKAN_HPP_NAMESPACE double float64; }; - struct PerformanceMarkerInfoINTEL - { - VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {} ) VULKAN_HPP_NOEXCEPT - : marker( marker_ ) - {} - - VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL ) - offsetof( PerformanceMarkerInfoINTEL, pNext ) ); - return *this; - } - - PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceMarkerInfoINTEL& operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT - { - marker = marker_; - return *this; - } - - operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( marker == rhs.marker ); - } - - bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL; - const void* pNext = {}; - uint64_t marker = {}; - }; - static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PerformanceOverrideInfoINTEL - { - VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, - VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, - uint64_t parameter_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , enable( enable_ ) - , parameter( parameter_ ) - {} - - VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL ) - offsetof( PerformanceOverrideInfoINTEL, pNext ) ); - return *this; - } - - PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceOverrideInfoINTEL& operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT - { - enable = enable_; - return *this; - } - - PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT - { - parameter = parameter_; - return *this; - } - - operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( enable == rhs.enable ) - && ( parameter == rhs.parameter ); - } - - bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware; - VULKAN_HPP_NAMESPACE::Bool32 enable = {}; - uint64_t parameter = {}; - }; - static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct PerformanceQuerySubmitInfoKHR { - VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : counterPassIndex( counterPassIndex_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR(uint32_t counterPassIndex_ = {}) VULKAN_HPP_NOEXCEPT + : counterPassIndex( counterPassIndex_ ) {} - VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR ) - offsetof( PerformanceQuerySubmitInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PerformanceQuerySubmitInfoKHR& operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PerformanceQuerySubmitInfoKHR ) ); return *this; } @@ -41648,6 +60624,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPerformanceQuerySubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -41658,6 +60635,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PerformanceQuerySubmitInfoKHR const& ) const = default; +#else bool operator==( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -41669,237 +60650,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR; const void* pNext = {}; uint32_t counterPassIndex = {}; + }; static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct PerformanceStreamMarkerInfoINTEL + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {} ) VULKAN_HPP_NOEXCEPT - : marker( marker_ ) - {} - - VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL ) - offsetof( PerformanceStreamMarkerInfoINTEL, pNext ) ); - return *this; - } - - PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceStreamMarkerInfoINTEL& operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT - { - marker = marker_; - return *this; - } - - operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( marker == rhs.marker ); - } - - bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; - const void* pNext = {}; - uint32_t marker = {}; + using Type = PerformanceQuerySubmitInfoKHR; }; - static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - union PerformanceValueDataINTEL - { - PerformanceValueDataINTEL( uint32_t value32_ = {} ) - { - value32 = value32_; - } - - PerformanceValueDataINTEL( uint64_t value64_ ) - { - value64 = value64_; - } - - PerformanceValueDataINTEL( float valueFloat_ ) - { - valueFloat = valueFloat_; - } - - PerformanceValueDataINTEL( const char* valueString_ ) - { - valueString = valueString_; - } - - PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT - { - value32 = value32_; - return *this; - } - - PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT - { - value64 = value64_; - return *this; - } - - PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT - { - valueFloat = valueFloat_; - return *this; - } - - PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT - { - valueBool = valueBool_; - return *this; - } - - PerformanceValueDataINTEL & setValueString( const char* valueString_ ) VULKAN_HPP_NOEXCEPT - { - valueString = valueString_; - return *this; - } - - VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) ); - return *this; - } - - operator VkPerformanceValueDataINTEL const&() const - { - return *reinterpret_cast(this); - } - - operator VkPerformanceValueDataINTEL &() - { - return *reinterpret_cast(this); - } - -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - uint32_t value32; - uint64_t value64; - float valueFloat; - VULKAN_HPP_NAMESPACE::Bool32 valueBool; - const char* valueString; -#else - uint32_t value32; - uint64_t value64; - float valueFloat; - VkBool32 valueBool; - const char* valueString; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ - }; - - struct PerformanceValueINTEL - { - PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, - VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , data( data_ ) - {} - - PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceValueINTEL& operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ ) VULKAN_HPP_NOEXCEPT - { - data = data_; - return *this; - } - - operator VkPerformanceValueINTEL const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - public: - VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32; - VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {}; - }; - static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PhysicalDevice16BitStorageFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT - : storageBuffer16BitAccess( storageBuffer16BitAccess_ ) - , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) - , storagePushConstant16( storagePushConstant16_ ) - , storageInputOutput16( storageInputOutput16_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}) VULKAN_HPP_NOEXCEPT + : storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ), storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures ) - offsetof( PhysicalDevice16BitStorageFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDevice16BitStorageFeatures ) ); return *this; } @@ -41933,6 +60729,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDevice16BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -41943,6 +60740,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevice16BitStorageFeatures const& ) const = default; +#else bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -41957,6 +60758,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures; @@ -41965,34 +60769,140 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; 
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; + }; static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct PhysicalDevice8BitStorageFeatures + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT - : storageBuffer8BitAccess( storageBuffer8BitAccess_ ) - , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ) - , storagePushConstant8( storagePushConstant8_ ) + using Type = PhysicalDevice16BitStorageFeatures; + }; + using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; + + struct PhysicalDevice4444FormatsFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {}) VULKAN_HPP_NOEXCEPT + : formatA4R4G4B4( formatA4R4G4B4_ ), formatA4B4G4R4( formatA4B4G4R4_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures ) - offsetof( PhysicalDevice8BitStorageFeatures, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDevice4444FormatsFeaturesEXT ) ); + return *this; + } + + PhysicalDevice4444FormatsFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT + { + formatA4R4G4B4 = formatA4R4G4B4_; + return *this; + } + + PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT + { + formatA4B4G4R4 = formatA4B4G4R4_; + return *this; + } + + + operator VkPhysicalDevice4444FormatsFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDevice4444FormatsFeaturesEXT const& rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) + && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 ); + } + + bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {}; + VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {}; + + }; + static_assert( sizeof( PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDevice4444FormatsFeaturesEXT; + }; + + struct PhysicalDevice8BitStorageFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}) VULKAN_HPP_NOEXCEPT + : storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ), storagePushConstant8( storagePushConstant8_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDevice8BitStorageFeatures& operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDevice8BitStorageFeatures ) ); return *this; } @@ -42020,6 +60930,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDevice8BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42030,6 +60941,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevice8BitStorageFeatures const& ) const = default; +#else bool operator==( PhysicalDevice8BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42043,6 +60958,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures; @@ -42050,30 +60968,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + }; static_assert( sizeof( 
PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDevice8BitStorageFeatures; + }; + using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; + struct PhysicalDeviceASTCDecodeFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT - : decodeModeSharedExponent( decodeModeSharedExponent_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {}) VULKAN_HPP_NOEXCEPT + : decodeModeSharedExponent( decodeModeSharedExponent_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT ) - offsetof( PhysicalDeviceASTCDecodeFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) ); return *this; } @@ -42089,6 +61022,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceASTCDecodeFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42099,6 +61033,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42110,35 +61048,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {}; + }; static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceASTCDecodeFeaturesEXT; + }; + struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT - : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {}) VULKAN_HPP_NOEXCEPT + : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) ); return *this; } @@ -42154,6 +61109,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42164,6 +61120,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42175,48 +61135,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {}; + }; static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
 
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
+  {
+    using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+  };
+
   struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
   {
-    PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {},
-                                                       VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {},
-                                                       VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {},
-                                                       VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {},
-                                                       VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {},
-                                                       VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT
-      : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
-      , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ )
-      , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ )
-      , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ )
-      , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ )
-      , advancedBlendAllOperations( advancedBlendAllOperations_ )
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT(uint32_t advancedBlendMaxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {}) VULKAN_HPP_NOEXCEPT
+      : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ), advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ), advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ), advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ ), advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ ), advancedBlendAllOperations( advancedBlendAllOperations_ )
     {}
 
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
       *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    PhysicalDeviceBlendOperationAdvancedPropertiesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
       return *this;
     }
 
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs )
VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42227,6 +61195,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42243,6 +61215,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; @@ -42253,34 +61228,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {}; VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {}; VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {}; + }; static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + }; + struct PhysicalDeviceBufferDeviceAddressFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT - : bufferDeviceAddress( bufferDeviceAddress_ ) - , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) - , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}) VULKAN_HPP_NOEXCEPT + : bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures ) - offsetof( PhysicalDeviceBufferDeviceAddressFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceBufferDeviceAddressFeatures& operator=( 
VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) ); return *this; } @@ -42308,6 +61293,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceBufferDeviceAddressFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42318,6 +61304,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42331,6 +61321,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; @@ -42338,34 +61331,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + }; static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceBufferDeviceAddressFeatures; + }; + using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; + struct PhysicalDeviceBufferDeviceAddressFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT - : bufferDeviceAddress( bufferDeviceAddress_ ) - , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) - , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}) VULKAN_HPP_NOEXCEPT + : bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT ) - offsetof( PhysicalDeviceBufferDeviceAddressFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) ); return *this; } @@ -42393,6 +61397,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42403,6 +61408,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42416,6 +61425,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; @@ -42423,30 +61435,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + }; static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + }; + using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + struct PhysicalDeviceCoherentMemoryFeaturesAMD { - VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceCoherentMemory( deviceCoherentMemory_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD(VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {}) VULKAN_HPP_NOEXCEPT + : deviceCoherentMemory( deviceCoherentMemory_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD ) - offsetof( PhysicalDeviceCoherentMemoryFeaturesAMD, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceCoherentMemoryFeaturesAMD& operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) ); return *this; } @@ -42462,6 +61489,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42472,6 +61500,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const& ) const = default; +#else bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42483,37 +61515,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; void* pNext = {}; 
VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {}; + }; static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceCoherentMemoryFeaturesAMD; + }; + struct PhysicalDeviceComputeShaderDerivativesFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT - : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ) - , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}) VULKAN_HPP_NOEXCEPT + : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ), computeDerivativeGroupLinear( computeDerivativeGroupLinear_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV ) - offsetof( PhysicalDeviceComputeShaderDerivativesFeaturesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) ); return *this; } @@ -42535,6 +61582,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42545,6 +61593,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& ) const = default; +#else bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42557,38 +61609,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {}; VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {}; + }; static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV; + }; + struct PhysicalDeviceConditionalRenderingFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT - : conditionalRendering( conditionalRendering_ ) - , inheritedConditionalRendering( inheritedConditionalRendering_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}) VULKAN_HPP_NOEXCEPT + : conditionalRendering( conditionalRendering_ ), inheritedConditionalRendering( inheritedConditionalRendering_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT ) - offsetof( PhysicalDeviceConditionalRenderingFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) ); return *this; } @@ -42610,6 +61677,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42620,6 +61688,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType 
) @@ -42632,55 +61704,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {}; VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {}; + }; static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceConditionalRenderingFeaturesEXT; + }; + struct PhysicalDeviceConservativeRasterizationPropertiesEXT { - PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {}, - float maxExtraPrimitiveOverestimationSize_ = {}, - float extraPrimitiveOverestimationSizeGranularity_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {} ) VULKAN_HPP_NOEXCEPT - : primitiveOverestimationSize( primitiveOverestimationSize_ ) - , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ) - , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ) - , primitiveUnderestimation( primitiveUnderestimation_ ) - , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ) - , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ) - , degenerateLinesRasterized( degenerateLinesRasterized_ ) - , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ) - , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(float primitiveOverestimationSize_ = {}, float maxExtraPrimitiveOverestimationSize_ = {}, float extraPrimitiveOverestimationSizeGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {}) VULKAN_HPP_NOEXCEPT + : primitiveOverestimationSize( primitiveOverestimationSize_ ), maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ), extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ), primitiveUnderestimation( primitiveUnderestimation_ ), conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ), degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ), degenerateLinesRasterized( degenerateLinesRasterized_ ), 
fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ), conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceConservativeRasterizationPropertiesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42691,6 +61765,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42710,6 +61788,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; @@ -42723,32 +61804,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {}; VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {}; VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {}; + }; static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; + }; + struct PhysicalDeviceCooperativeMatrixFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT - : cooperativeMatrix( cooperativeMatrix_ ) - , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}) VULKAN_HPP_NOEXCEPT + : cooperativeMatrix( cooperativeMatrix_ ), cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV ) - offsetof( PhysicalDeviceCooperativeMatrixFeaturesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceCooperativeMatrixFeaturesNV& operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) ); return *this; } @@ -42770,6 +61863,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42780,6 +61874,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const& ) const = default; +#else bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42792,39 +61890,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; + }; static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" 
); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixFeaturesNV; + }; + struct PhysicalDeviceCooperativeMatrixPropertiesNV { - PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT - : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}) VULKAN_HPP_NOEXCEPT + : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV ) - offsetof( PhysicalDeviceCooperativeMatrixPropertiesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceCooperativeMatrixPropertiesNV& operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) ); + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42835,6 +61951,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const& ) const = default; +#else bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42846,35 +61966,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; + }; static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; + }; + struct PhysicalDeviceCornerSampledImageFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT - : cornerSampledImage( cornerSampledImage_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}) VULKAN_HPP_NOEXCEPT + : cornerSampledImage( cornerSampledImage_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV ) - offsetof( PhysicalDeviceCornerSampledImageFeaturesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) ); return *this; } @@ -42890,6 +62027,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceCornerSampledImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42900,6 +62038,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const& ) const = default; +#else bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42911,35 +62053,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {}; + }; static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceCornerSampledImageFeaturesNV; + }; + struct PhysicalDeviceCoverageReductionModeFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT - : coverageReductionMode( coverageReductionMode_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}) VULKAN_HPP_NOEXCEPT + : coverageReductionMode( coverageReductionMode_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV ) - offsetof( PhysicalDeviceCoverageReductionModeFeaturesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceCoverageReductionModeFeaturesNV& operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) ); return *this; } @@ -42955,6 +62114,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -42965,6 +62125,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const& ) const = default; +#else bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -42976,35 +62140,222 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {}; + }; static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT - : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ ) + using Type = PhysicalDeviceCoverageReductionModeFeaturesNV; + }; + + struct PhysicalDeviceCustomBorderColorFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}) VULKAN_HPP_NOEXCEPT + : customBorderColors( customBorderColors_ ), customBorderColorWithoutFormat( customBorderColorWithoutFormat_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) - offsetof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceCustomBorderColorFeaturesEXT ) ); + return *this; + } + + PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColors = customBorderColors_; + return *this; + } + + PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColorWithoutFormat = customBorderColorWithoutFormat_; + return *this; + } + + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( customBorderColors == rhs.customBorderColors ) + && ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat ); 
+ } + + bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {}; + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {}; + + }; + static_assert( sizeof( PhysicalDeviceCustomBorderColorFeaturesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorFeaturesEXT; + }; + + struct PhysicalDeviceCustomBorderColorPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT(uint32_t maxCustomBorderColorSamplers_ = {}) VULKAN_HPP_NOEXCEPT + : maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceCustomBorderColorPropertiesEXT ) ); + return *this; + } + + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers ); + } + + bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + void* pNext = {}; + uint32_t maxCustomBorderColorSamplers = {}; + + }; + static_assert( sizeof( PhysicalDeviceCustomBorderColorPropertiesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorPropertiesEXT; + }; + + struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}) VULKAN_HPP_NOEXCEPT + : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) ); return *this; } @@ -43020,6 +62371,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -43030,6 +62382,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& ) const = default; +#else bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -43041,35 +62397,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {}; + }; static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + }; + struct PhysicalDeviceDepthClipEnableFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT - : depthClipEnable( depthClipEnable_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}) VULKAN_HPP_NOEXCEPT + : depthClipEnable( depthClipEnable_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT ) - offsetof( PhysicalDeviceDepthClipEnableFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceDepthClipEnableFeaturesEXT& operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) ); return *this; } @@ -43085,6 +62458,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -43095,6 +62469,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -43106,44 +62484,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + }; static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClipEnableFeaturesEXT; + }; + struct PhysicalDeviceDepthStencilResolveProperties { - PhysicalDeviceDepthStencilResolveProperties( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {} ) VULKAN_HPP_NOEXCEPT - : supportedDepthResolveModes( supportedDepthResolveModes_ ) - , supportedStencilResolveModes( supportedStencilResolveModes_ ) - , independentResolveNone( independentResolveNone_ ) - , independentResolve( independentResolve_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}) VULKAN_HPP_NOEXCEPT + : supportedDepthResolveModes( supportedDepthResolveModes_ ), supportedStencilResolveModes( supportedStencilResolveModes_ ), independentResolveNone( independentResolveNone_ ), independentResolve( independentResolve_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties ) - offsetof( PhysicalDeviceDepthStencilResolveProperties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceDepthStencilResolveProperties& operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceDepthStencilResolveProperties ) ); + return *this; + } + + operator VkPhysicalDeviceDepthStencilResolveProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -43154,6 +62544,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const& ) const = default; +#else bool operator==( PhysicalDeviceDepthStencilResolveProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -43168,6 +62562,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceDepthStencilResolveProperties; @@ -43176,68 +62573,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + }; static_assert( sizeof( PhysicalDeviceDepthStencilResolveProperties ) == sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceDepthStencilResolveProperties; + }; + using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; + struct PhysicalDeviceDescriptorIndexingFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) - , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) - , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) - , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) - , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) - , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) - , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) - , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) - , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) - , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) - , 
descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) - , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) - , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) - , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) - , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) - , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) - , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) - , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) - , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) - , runtimeDescriptorArray( runtimeDescriptorArray_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}) VULKAN_HPP_NOEXCEPT + : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ), shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ), shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ), shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ), shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ), shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ), shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ), shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ), 
shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ), shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ), descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ), descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ), descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ), descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ), descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ), descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ), descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ), descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ), descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ), runtimeDescriptorArray( runtimeDescriptorArray_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures ) - offsetof( PhysicalDeviceDescriptorIndexingFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceDescriptorIndexingFeatures& operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceDescriptorIndexingFeatures ) ); return *this; } @@ -43367,6 +62741,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceDescriptorIndexingFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -43377,6 +62752,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceDescriptorIndexingFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -43407,6 +62786,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; @@ -43431,77 +62813,49 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; + }; static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( 
VkPhysicalDeviceDescriptorIndexingFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorIndexingFeatures; + }; + using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; + struct PhysicalDeviceDescriptorIndexingProperties { - PhysicalDeviceDescriptorIndexingProperties( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, - uint32_t maxPerStageUpdateAfterBindResources_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {} ) VULKAN_HPP_NOEXCEPT - : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) - , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) - , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) - , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) - , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) - , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) - , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) - , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) - , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) - , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) - , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) - , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) - , maxPerStageUpdateAfterBindResources( 
maxPerStageUpdateAfterBindResources_ ) - , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) - , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) - , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}) VULKAN_HPP_NOEXCEPT + : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ), shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ), shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ), shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ), shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ), shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ), robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ), quadDivergentImplicitLod( quadDivergentImplicitLod_ ), maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ 
), maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ), maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ), maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ), maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ), maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ), maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ), maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ), maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ), maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ), maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties ) - offsetof( PhysicalDeviceDescriptorIndexingProperties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceDescriptorIndexingProperties& operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceDescriptorIndexingProperties ) ); + return *this; + } + + operator VkPhysicalDeviceDescriptorIndexingProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -43512,6 +62866,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const& ) const = default; +#else bool operator==( PhysicalDeviceDescriptorIndexingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -43545,6 +62903,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; @@ -43572,33 +62933,314 @@ namespace VULKAN_HPP_NAMESPACE uint32_t 
maxDescriptorSetUpdateAfterBindSampledImages = {}; uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + }; static_assert( sizeof( PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct PhysicalDeviceDiscardRectanglePropertiesEXT + template <> + struct CppType { - PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT - : maxDiscardRectangles( maxDiscardRectangles_ ) + using Type = PhysicalDeviceDescriptorIndexingProperties; + }; + using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; + + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}) VULKAN_HPP_NOEXCEPT + : deviceGeneratedCommands( deviceGeneratedCommands_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT ) - offsetof( PhysicalDeviceDiscardRectanglePropertiesEXT, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) ); + return *this; + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCommands = deviceGeneratedCommands_; + return *this; + } + + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( 
deviceGeneratedCommands == rhs.deviceGeneratedCommands ); + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; + + }; + static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + }; + + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(uint32_t maxGraphicsShaderGroupCount_ = {}, uint32_t maxIndirectSequenceCount_ = {}, uint32_t maxIndirectCommandsTokenCount_ = {}, uint32_t maxIndirectCommandsStreamCount_ = {}, uint32_t maxIndirectCommandsTokenOffset_ = {}, uint32_t maxIndirectCommandsStreamStride_ = {}, uint32_t minSequencesCountBufferOffsetAlignment_ = {}, uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}) VULKAN_HPP_NOEXCEPT + : maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ), maxIndirectSequenceCount( maxIndirectSequenceCount_ ), maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ), maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ), maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ), maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ), minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ), minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ), minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) ); + return *this; + } + + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( 
PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) + && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) + && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) + && ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) + && ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) + && ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) + && ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) + && ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) + && ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment ); + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + void* pNext = {}; + uint32_t maxGraphicsShaderGroupCount = {}; + uint32_t maxIndirectSequenceCount = {}; + uint32_t maxIndirectCommandsTokenCount = {}; + uint32_t maxIndirectCommandsStreamCount = {}; + uint32_t maxIndirectCommandsTokenOffset = {}; + uint32_t maxIndirectCommandsStreamStride = {}; + uint32_t minSequencesCountBufferOffsetAlignment = {}; + uint32_t minSequencesIndexBufferOffsetAlignment = {}; + uint32_t minIndirectCommandsBufferOffsetAlignment = {}; + + }; + static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV> + { + using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + }; + + struct PhysicalDeviceDiagnosticsConfigFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}) VULKAN_HPP_NOEXCEPT + : diagnosticsConfig( diagnosticsConfig_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs ); + return *this; + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) ); + return *this; + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT + { + diagnosticsConfig = diagnosticsConfig_; + return *this; + } + + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this ); + } + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const& ) const = default; +#else + bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( diagnosticsConfig == rhs.diagnosticsConfig ); + } + + bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {}; + + }; + static_assert( sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceDiagnosticsConfigFeaturesNV>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV; + }; + + struct PhysicalDeviceDiscardRectanglePropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT(uint32_t maxDiscardRectangles_ = {}) VULKAN_HPP_NOEXCEPT + : maxDiscardRectangles( maxDiscardRectangles_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -43609,6 +63251,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -43620,47 +63266,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; void* pNext = {}; uint32_t maxDiscardRectangles = {}; + }; static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceDiscardRectanglePropertiesEXT; + }; + struct PhysicalDeviceDriverProperties { - PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, - std::array const& driverName_ = {}, - std::array const& driverInfo_ = {}, - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT - : driverID( driverID_ ) - , driverName{} - , driverInfo{} - , conformanceVersion( conformanceVersion_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( driverName, driverName_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( driverInfo, driverInfo_ ); - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties; - VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties ) - offsetof( PhysicalDeviceDriverProperties, pNext ) ); - return *this; - } +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array const& driverName_ = {}, std::array const& driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}) VULKAN_HPP_NOEXCEPT + : driverID( driverID_ ), driverName( driverName_ ), driverInfo( driverInfo_ ), conformanceVersion( conformanceVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceDriverProperties& operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceDriverProperties ) ); + return *this; + } + + operator VkPhysicalDeviceDriverProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -43671,13 +63326,17 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceDriverProperties const& ) const = default; +#else bool operator==( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) - && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE * sizeof( char ) ) == 0 ) - && ( memcmp( driverInfo, rhs.driverInfo, VK_MAX_DRIVER_INFO_SIZE * sizeof( char ) ) == 0 ) + && ( driverName == rhs.driverName ) + && ( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion ); } @@ -43685,38 +63344,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceDriverProperties; void* pNext = {}; VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; - char driverName[VK_MAX_DRIVER_NAME_SIZE] = {}; - char driverInfo[VK_MAX_DRIVER_INFO_SIZE] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; + }; static_assert( sizeof( PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceDriverProperties; + }; + using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; + struct PhysicalDeviceExclusiveScissorFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} ) VULKAN_HPP_NOEXCEPT - : exclusiveScissor( exclusiveScissor_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}) VULKAN_HPP_NOEXCEPT + : exclusiveScissor( exclusiveScissor_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV ) - offsetof( PhysicalDeviceExclusiveScissorFeaturesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) ); return *this; } @@ -43732,6 +63409,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -43742,6 +63420,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const& ) const = default; +#else bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -43753,185 +63435,139 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {}; + }; static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct PhysicalDeviceExternalBufferInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , usage( usage_ ) - , handleType( handleType_ ) + using Type = PhysicalDeviceExclusiveScissorFeaturesNV; + }; + + struct PhysicalDeviceExtendedDynamicStateFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}) VULKAN_HPP_NOEXCEPT + : extendedDynamicState( extendedDynamicState_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo ) - offsetof( PhysicalDeviceExternalBufferInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExternalBufferInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceExtendedDynamicStateFeaturesEXT ) ); + return *this; + } + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExtendedDynamicStateFeaturesEXT & setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + extendedDynamicState = extendedDynamicState_; return *this; } - PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + + operator 
VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { - usage = usage_; - return *this; + return *reinterpret_cast( this ); } - PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - handleType = handleType_; - return *this; + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceExternalBufferInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( usage == rhs.usage ) - && ( handleType == rhs.handleType ); + && ( extendedDynamicState == rhs.extendedDynamicState ); } - bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - }; - static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {}; - struct PhysicalDeviceExternalFenceInfo + }; + static_assert( sizeof( PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) - {} - - VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo ) - offsetof( PhysicalDeviceExternalFenceInfo, pNext ) ); - return *this; - } - - PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceExternalFenceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - operator VkPhysicalDeviceExternalFenceInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT; }; - static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceExternalImageFormatInfo { - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo ) - offsetof( PhysicalDeviceExternalImageFormatInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceExternalImageFormatInfo ) ); return *this; } @@ -43947,6 +63583,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceExternalImageFormatInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -43957,6 +63594,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceExternalImageFormatInfo const& ) const = default; +#else bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -43968,38 +63609,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; const void* pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceExternalImageFormatInfo; + }; + using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; + struct PhysicalDeviceExternalMemoryHostPropertiesEXT { - PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT - : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}) VULKAN_HPP_NOEXCEPT + : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT ) - offsetof( PhysicalDeviceExternalMemoryHostPropertiesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -44010,6 +63670,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -44021,200 +63685,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {}; + }; static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct PhysicalDeviceExternalSemaphoreInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) - {} - - VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo ) - offsetof( PhysicalDeviceExternalSemaphoreInfo, pNext ) ); - return *this; - } - - PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceExternalSemaphoreInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - operator VkPhysicalDeviceExternalSemaphoreInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT; }; - static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PhysicalDeviceFeatures2 - { - VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT - : features( features_ ) - {} - - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 ) - offsetof( PhysicalDeviceFeatures2, pNext ) ); - return *this; - } - - PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceFeatures2 & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ ) VULKAN_HPP_NOEXCEPT - { - features = features_; - return *this; - } - - operator VkPhysicalDeviceFeatures2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( features == rhs.features ); - } - - bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {}; - }; - static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
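// ---------------------------------------------------------------------------
// [editorial note - illustrative sketch, not part of the imported header]
// PhysicalDeviceExternalSemaphoreInfo and PhysicalDeviceFeatures2 are removed
// at this spot by the regeneration; the same definitions appear to be emitted
// elsewhere in the updated header, and their role as query inputs is
// unchanged. A hedged sketch of the usual pNext-chained feature query,
// assuming getFeatures2 keeps its pointer overload in this header version:
#include <vulkan/vulkan.hpp>

inline bool supportsImagelessFramebufferSketch( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceImagelessFramebufferFeatures imagelessFeatures{};
  vk::PhysicalDeviceFeatures2 features2{};
  features2.setPNext( &imagelessFeatures );   // chain the extension struct

  physicalDevice.getFeatures2( &features2 );  // fills every struct in the chain
  return imagelessFeatures.imagelessFramebuffer == VK_TRUE;
}
// ---------------------------------------------------------------------------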
); struct PhysicalDeviceFloatControlsProperties { - PhysicalDeviceFloatControlsProperties( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {} ) VULKAN_HPP_NOEXCEPT - : denormBehaviorIndependence( denormBehaviorIndependence_ ) - , roundingModeIndependence( roundingModeIndependence_ ) - , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) - , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) - , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) - , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) - , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) - , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) - , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) - , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) - , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) - , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) - , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) - , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) - , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) - , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) - , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties(VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, 
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}) VULKAN_HPP_NOEXCEPT + : denormBehaviorIndependence( denormBehaviorIndependence_ ), roundingModeIndependence( roundingModeIndependence_ ), shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ), shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ), shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ), shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ), shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ), shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ), shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ), shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ), shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ), shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ), shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ), shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ), shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ), shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ), shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties ) - offsetof( PhysicalDeviceFloatControlsProperties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceFloatControlsProperties& operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceFloatControlsProperties ) ); + return *this; + } + + operator VkPhysicalDeviceFloatControlsProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -44225,6 +63745,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceFloatControlsProperties const& ) const = default; +#else bool operator==( PhysicalDeviceFloatControlsProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ 
-44252,6 +63776,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties; @@ -44273,37 +63800,241 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + }; static_assert( sizeof( PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct PhysicalDeviceFragmentDensityMapFeaturesEXT + template <> + struct CppType { - PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT - : fragmentDensityMap( fragmentDensityMap_ ) - , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ) - , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ ) + using Type = PhysicalDeviceFloatControlsProperties; + }; + using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; + + struct PhysicalDeviceFragmentDensityMap2FeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}) VULKAN_HPP_NOEXCEPT + : fragmentDensityMapDeferred( fragmentDensityMapDeferred_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapFeaturesEXT, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceFragmentDensityMap2FeaturesEXT ) ); + return *this; + } + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapDeferred = fragmentDensityMapDeferred_; + return *this; + } + + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const&() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred ); + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {}; + + }; + static_assert( sizeof( PhysicalDeviceFragmentDensityMap2FeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT; + }; + + struct PhysicalDeviceFragmentDensityMap2PropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, uint32_t maxSubsampledArrayLayers_ = {}, uint32_t maxDescriptorSetSubsampledSamplers_ = {}) VULKAN_HPP_NOEXCEPT + : subsampledLoads( subsampledLoads_ ), subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ ), maxSubsampledArrayLayers( maxSubsampledArrayLayers_ ), maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceFragmentDensityMap2PropertiesEXT ) ); + return *this; + } + + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const& rhs 
) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( subsampledLoads == rhs.subsampledLoads ) + && ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess ) + && ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) + && ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers ); + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {}; + uint32_t maxSubsampledArrayLayers = {}; + uint32_t maxDescriptorSetSubsampledSamplers = {}; + + }; + static_assert( sizeof( PhysicalDeviceFragmentDensityMap2PropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT; + }; + + struct PhysicalDeviceFragmentDensityMapFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}) VULKAN_HPP_NOEXCEPT + : fragmentDensityMap( fragmentDensityMap_ ), fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ), fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceFragmentDensityMapFeaturesEXT& operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) ); + return *this; + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMap = fragmentDensityMap_; + return *this; + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) 
VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapDynamic = fragmentDensityMapDynamic_; + return *this; + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_; + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -44314,6 +64045,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -44327,6 +64062,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; @@ -44334,37 +64072,48 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {}; + }; static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT; + }; + struct PhysicalDeviceFragmentDensityMapPropertiesEXT { - PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {} ) VULKAN_HPP_NOEXCEPT - : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ) - , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ) - , fragmentDensityInvocations( fragmentDensityInvocations_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}) VULKAN_HPP_NOEXCEPT + : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ), maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ), fragmentDensityInvocations( fragmentDensityInvocations_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapPropertiesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapPropertiesEXT( 
VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceFragmentDensityMapPropertiesEXT& operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -44375,6 +64124,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -44388,6 +64141,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; @@ -44395,30 +64151,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {}; VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {}; + }; static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
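// ---------------------------------------------------------------------------
// [editorial note - illustrative sketch, not part of the imported header]
// The fragment density map property structs above are pure outputs: they are
// filled by chaining them into a vkGetPhysicalDeviceProperties2 query. Being
// returned-only, they have no setters, so pNext is assigned directly. A
// hedged sketch, assuming getProperties2 keeps its pointer overload:
#include <vulkan/vulkan.hpp>

inline vk::Extent2D maxFragmentDensityTexelSizeSketch( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceFragmentDensityMapPropertiesEXT densityMapProps{};
  vk::PhysicalDeviceProperties2 props2{};
  props2.pNext = &densityMapProps;          // returned-only structs expose pNext as a member

  physicalDevice.getProperties2( &props2 );
  return densityMapProps.maxFragmentDensityTexelSize;
}
// ---------------------------------------------------------------------------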
); + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT; + }; + struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT - : fragmentShaderBarycentric( fragmentShaderBarycentric_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {}) VULKAN_HPP_NOEXCEPT + : fragmentShaderBarycentric( fragmentShaderBarycentric_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) - offsetof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) ); return *this; } @@ -44434,6 +64204,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -44444,6 +64215,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& ) const = default; +#else bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -44455,39 +64230,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {}; + }; static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
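// ---------------------------------------------------------------------------
// [editorial note - illustrative sketch, not part of the imported header]
// Comparison support in the regenerated wrappers: when the compiler provides
// three-way comparison (VULKAN_HPP_HAS_SPACESHIP_OPERATOR) the structs default
// operator<=>, otherwise the memberwise operator== / operator!= shown above is
// used. Plain equality tests read the same either way:
#include <vulkan/vulkan.hpp>

inline bool barycentricFeaturesDifferSketch()
{
  vk::PhysicalDeviceFragmentShaderBarycentricFeaturesNV a{};
  vk::PhysicalDeviceFragmentShaderBarycentricFeaturesNV b{};
  b.setFragmentShaderBarycentric( VK_TRUE );

  // Memberwise comparison of sType, pNext and fragmentShaderBarycentric.
  return ( a != b ) && !( a == b );
}
// ---------------------------------------------------------------------------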
); + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesNV; + }; + struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT - : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ) - , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ) - , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}) VULKAN_HPP_NOEXCEPT + : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ), fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ), fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) - offsetof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) ); return *this; } @@ -44515,6 +64303,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -44525,6 +64314,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -44538,6 +64331,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; @@ -44545,39 +64341,48 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {}; + }; static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + }; + struct PhysicalDeviceGroupProperties { - PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {}, - std::array const& physicalDevices_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT - : physicalDeviceCount( physicalDeviceCount_ ) - , physicalDevices{} - , subsetAllocation( subsetAllocation_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( physicalDevices, physicalDevices_ ); - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties; - VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties ) - offsetof( PhysicalDeviceGroupProperties, pNext ) ); - return *this; - } +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties(uint32_t physicalDeviceCount_ = {}, std::array const& physicalDevices_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}) VULKAN_HPP_NOEXCEPT + : physicalDeviceCount( physicalDeviceCount_ ), physicalDevices( physicalDevices_ ), subsetAllocation( subsetAllocation_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceGroupProperties& operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceGroupProperties ) ); + return *this; + } + + operator VkPhysicalDeviceGroupProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -44588,12 +64393,16 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceGroupProperties const& ) const = default; +#else bool operator==( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) - && ( memcmp( physicalDevices, rhs.physicalDevices, std::min( VK_MAX_DEVICE_GROUP_SIZE, physicalDeviceCount ) * 
sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ) == 0 ) + && ( physicalDevices == rhs.physicalDevices ) && ( subsetAllocation == rhs.subsetAllocation ); } @@ -44601,37 +64410,55 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties; void* pNext = {}; uint32_t physicalDeviceCount = {}; - VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D physicalDevices = {}; VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {}; + }; static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceGroupProperties; + }; + using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + struct PhysicalDeviceHostQueryResetFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT - : hostQueryReset( hostQueryReset_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures(VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}) VULKAN_HPP_NOEXCEPT + : hostQueryReset( hostQueryReset_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures ) - offsetof( PhysicalDeviceHostQueryResetFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceHostQueryResetFeatures& operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceHostQueryResetFeatures ) ); return *this; } @@ -44647,6 +64474,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceHostQueryResetFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -44657,6 +64485,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceHostQueryResetFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceHostQueryResetFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -44668,50 +64500,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + }; static_assert( sizeof( PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceHostQueryResetFeatures; + }; + using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; + struct PhysicalDeviceIDProperties { - PhysicalDeviceIDProperties( std::array const& deviceUUID_ = {}, - std::array const& driverUUID_ = {}, - std::array const& deviceLUID_ = {}, - uint32_t deviceNodeMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceUUID{} - , driverUUID{} - , deviceLUID{} - , deviceNodeMask( deviceNodeMask_ ) - , deviceLUIDValid( deviceLUIDValid_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( deviceUUID, deviceUUID_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( driverUUID, driverUUID_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( deviceLUID, deviceLUID_ ); - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties; - VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties ) - offsetof( PhysicalDeviceIDProperties, pNext ) ); - return *this; - } +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties(std::array const& deviceUUID_ = {}, std::array const& driverUUID_ = {}, std::array const& deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}) VULKAN_HPP_NOEXCEPT + : deviceUUID( deviceUUID_ ), driverUUID( driverUUID_ ), deviceLUID( deviceLUID_ ), deviceNodeMask( deviceNodeMask_ ), deviceLUIDValid( deviceLUIDValid_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceIDProperties& operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceIDProperties ) ); + return *this; + } + + operator VkPhysicalDeviceIDProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -44722,13 +64561,17 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceIDProperties const& ) const = default; +#else bool operator==( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( memcmp( deviceUUID, rhs.deviceUUID, 
VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) - && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) - && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 ) + && ( deviceUUID == rhs.deviceUUID ) + && ( driverUUID == rhs.driverUUID ) + && ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ); } @@ -44737,45 +64580,63 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties; void* pNext = {}; - uint8_t deviceUUID[VK_UUID_SIZE] = {}; - uint8_t driverUUID[VK_UUID_SIZE] = {}; - uint8_t deviceLUID[VK_LUID_SIZE] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; uint32_t deviceNodeMask = {}; VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + }; static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceIDProperties; + }; + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + struct PhysicalDeviceImageDrmFormatModifierInfoEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifier( drmFormatModifier_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(uint64_t drmFormatModifier_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT ) - offsetof( PhysicalDeviceImageDrmFormatModifierInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t 
drmFormatModifier_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) + : drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) ); return *this; } @@ -44809,6 +64670,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -44819,6 +64690,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const& ) const = default; +#else bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -44833,6 +64708,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; @@ -44841,135 +64719,131 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; uint32_t queueFamilyIndexCount = {}; const uint32_t* pQueueFamilyIndices = {}; + }; static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
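// ---------------------------------------------------------------------------
// [editorial note - illustrative sketch, not part of the imported header]
// Two related changes in the hunks above: fixed-size C arrays such as
// deviceUUID and physicalDevices become ArrayWrapper1D members, so the
// generated operator== compares them directly instead of via memcmp, and
// count/pointer pairs gain ArrayProxyNoTemporaries helpers such as
// setQueueFamilyIndices. A hedged sketch of both; std::vector is only an
// example container here:
#include <vector>
#include <vulkan/vulkan.hpp>

inline bool sameDeviceSketch( vk::PhysicalDeviceIDProperties const & a,
                              vk::PhysicalDeviceIDProperties const & b )
{
  // ArrayWrapper1D supports == directly, no memcmp or manual loop needed.
  return a.deviceUUID == b.deviceUUID;
}

inline vk::PhysicalDeviceImageDrmFormatModifierInfoEXT
drmModifierQueryInfoSketch( uint64_t modifier, std::vector<uint32_t> const & queueFamilies )
{
  // setQueueFamilyIndices fills queueFamilyIndexCount and pQueueFamilyIndices
  // in one call; the struct keeps a pointer into the caller's vector, so the
  // vector must outlive the returned info.
  vk::PhysicalDeviceImageDrmFormatModifierInfoEXT info{};
  info.setDrmFormatModifier( modifier )
      .setSharingMode( vk::SharingMode::eConcurrent )
      .setQueueFamilyIndices( queueFamilies );
  return info;
}
// ---------------------------------------------------------------------------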
); - struct PhysicalDeviceImageFormatInfo2 + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : format( format_ ) - , type( type_ ) - , tiling( tiling_ ) - , usage( usage_ ) - , flags( flags_ ) + using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT; + }; + + struct PhysicalDeviceImageRobustnessFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}) VULKAN_HPP_NOEXCEPT + : robustImageAccess( robustImageAccess_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 ) - offsetof( PhysicalDeviceImageFormatInfo2, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageRobustnessFeaturesEXT( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageRobustnessFeaturesEXT & operator=( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageRobustnessFeaturesEXT & operator=( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceImageRobustnessFeaturesEXT ) ); + return *this; + } + + PhysicalDeviceImageRobustnessFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageRobustnessFeaturesEXT & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT { - format = format_; + robustImageAccess = robustImageAccess_; return *this; } - PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + + operator VkPhysicalDeviceImageRobustnessFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { - type = type_; - return *this; + return *reinterpret_cast( this ); } - PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceImageRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - tiling = tiling_; - return *this; + return *reinterpret_cast( 
this ); } - PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT - { - usage = usage_; - return *this; - } - PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - operator VkPhysicalDeviceImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceImageRobustnessFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceImageRobustnessFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( type == rhs.type ) - && ( tiling == rhs.tiling ) - && ( usage == rhs.usage ) - && ( flags == rhs.flags ); + && ( robustImageAccess == rhs.robustImageAccess ); } - bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceImageRobustnessFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; - VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {}; + + }; + static_assert( sizeof( PhysicalDeviceImageRobustnessFeaturesEXT ) == sizeof( VkPhysicalDeviceImageRobustnessFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageRobustnessFeaturesEXT; }; - static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
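// ---------------------------------------------------------------------------
// [editorial note - illustrative sketch, not part of the imported header]
// PhysicalDeviceImageRobustnessFeaturesEXT is newly added by this update.
// Like the other feature structs it is queried through the
// PhysicalDeviceFeatures2 chain and then handed back through
// DeviceCreateInfo::pNext to enable the feature. A hedged sketch, with
// queue setup and the support query omitted:
#include <vulkan/vulkan.hpp>

inline vk::DeviceCreateInfo enableImageRobustnessSketch(
    vk::PhysicalDeviceImageRobustnessFeaturesEXT & imageRobustness )
{
  imageRobustness.setRobustImageAccess( VK_TRUE );

  vk::DeviceCreateInfo deviceCreateInfo{};
  deviceCreateInfo.setPNext( &imageRobustness );  // caller keeps the struct alive
  return deviceCreateInfo;
}
// ---------------------------------------------------------------------------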
); struct PhysicalDeviceImageViewImageFormatInfoEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT - : imageViewType( imageViewType_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT(VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D) VULKAN_HPP_NOEXCEPT + : imageViewType( imageViewType_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT ) - offsetof( PhysicalDeviceImageViewImageFormatInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceImageViewImageFormatInfoEXT& operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) ); return *this; } @@ -44985,6 +64859,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceImageViewImageFormatInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -44995,6 +64870,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const& ) const = default; +#else bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -45006,35 +64885,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + }; static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceImageViewImageFormatInfoEXT; + }; + struct PhysicalDeviceImagelessFramebufferFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT - : imagelessFramebuffer( imagelessFramebuffer_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}) VULKAN_HPP_NOEXCEPT + : imagelessFramebuffer( imagelessFramebuffer_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures ) - offsetof( PhysicalDeviceImagelessFramebufferFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceImagelessFramebufferFeatures& operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceImagelessFramebufferFeatures ) ); return *this; } @@ -45050,6 +64946,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceImagelessFramebufferFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -45060,6 +64957,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceImagelessFramebufferFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -45071,35 +64972,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + }; static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
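The constructors in these structs are now guarded by VULKAN_HPP_NO_STRUCT_CONSTRUCTORS. A hedged sketch of what that guard allows (not in the diff): with the macro defined and a C++20 compiler, the wrappers remain aggregates and can be filled with designated initializers, while sType keeps its default member initializer:

#define VULKAN_HPP_NO_STRUCT_CONSTRUCTORS  // opt out of the generated constructors
#include <vulkan/vulkan.hpp>

// C++20 designated initializers; members not named keep their default initializers,
// so sType stays at ePhysicalDeviceImagelessFramebufferFeatures and pNext stays null.
vk::PhysicalDeviceImagelessFramebufferFeatures imagelessFeatures{
    .imagelessFramebuffer = VK_TRUE
};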
); + template <> + struct CppType + { + using Type = PhysicalDeviceImagelessFramebufferFeatures; + }; + using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; + struct PhysicalDeviceIndexTypeUint8FeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT - : indexTypeUint8( indexTypeUint8_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}) VULKAN_HPP_NOEXCEPT + : indexTypeUint8( indexTypeUint8_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT ) - offsetof( PhysicalDeviceIndexTypeUint8FeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceIndexTypeUint8FeaturesEXT& operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) ); return *this; } @@ -45115,6 +65034,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -45125,6 +65045,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -45136,37 +65060,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; + }; static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
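Each struct now also specializes the CppType mapping from StructureType to wrapper type, which is what the StructureChain machinery defined earlier in this header keys off. A hedged sketch (not in the diff) of querying one of these feature structs through a pNext chain, assuming a Vulkan 1.1 capable instance and the templated getFeatures2 overload from this header:

#include <vulkan/vulkan.hpp>

bool supportsUint8Indices( vk::PhysicalDevice physicalDevice )
{
    // getFeatures2 fills a StructureChain: each link's sType comes from its structureType,
    // and the links are wired together through pNext automatically.
    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                             vk::PhysicalDeviceIndexTypeUint8FeaturesEXT>();
    return chain.get<vk::PhysicalDeviceIndexTypeUint8FeaturesEXT>().indexTypeUint8 == VK_TRUE;
}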
); + template <> + struct CppType + { + using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT; + }; + struct PhysicalDeviceInlineUniformBlockFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT - : inlineUniformBlock( inlineUniformBlock_ ) - , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}) VULKAN_HPP_NOEXCEPT + : inlineUniformBlock( inlineUniformBlock_ ), descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceInlineUniformBlockFeaturesEXT& operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) ); return *this; } @@ -45188,6 +65127,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -45198,6 +65138,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceInlineUniformBlockFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -45210,47 +65154,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; + }; static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( 
VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceInlineUniformBlockFeaturesEXT; + }; + struct PhysicalDeviceInlineUniformBlockPropertiesEXT { - PhysicalDeviceInlineUniformBlockPropertiesEXT( uint32_t maxInlineUniformBlockSize_ = {}, - uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, - uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {} ) VULKAN_HPP_NOEXCEPT - : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) - , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) - , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) - , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) - , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT(uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}) VULKAN_HPP_NOEXCEPT + : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ), maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ), maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ), maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ), maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockPropertiesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceInlineUniformBlockPropertiesEXT& operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( 
PhysicalDeviceInlineUniformBlockPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -45261,6 +65215,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceInlineUniformBlockPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -45276,6 +65234,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT; @@ -45285,510 +65246,44 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; uint32_t maxDescriptorSetInlineUniformBlocks = {}; uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; + }; static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct PhysicalDeviceLimits + template <> + struct CppType { - PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {}, - uint32_t maxImageDimension2D_ = {}, - uint32_t maxImageDimension3D_ = {}, - uint32_t maxImageDimensionCube_ = {}, - uint32_t maxImageArrayLayers_ = {}, - uint32_t maxTexelBufferElements_ = {}, - uint32_t maxUniformBufferRange_ = {}, - uint32_t maxStorageBufferRange_ = {}, - uint32_t maxPushConstantsSize_ = {}, - uint32_t maxMemoryAllocationCount_ = {}, - uint32_t maxSamplerAllocationCount_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, - uint32_t maxBoundDescriptorSets_ = {}, - uint32_t maxPerStageDescriptorSamplers_ = {}, - uint32_t maxPerStageDescriptorUniformBuffers_ = {}, - uint32_t maxPerStageDescriptorStorageBuffers_ = {}, - uint32_t maxPerStageDescriptorSampledImages_ = {}, - uint32_t maxPerStageDescriptorStorageImages_ = {}, - uint32_t maxPerStageDescriptorInputAttachments_ = {}, - uint32_t maxPerStageResources_ = {}, - uint32_t maxDescriptorSetSamplers_ = {}, - uint32_t maxDescriptorSetUniformBuffers_ = {}, - uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, - uint32_t maxDescriptorSetStorageBuffers_ = {}, - uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, - uint32_t maxDescriptorSetSampledImages_ = {}, - uint32_t maxDescriptorSetStorageImages_ = {}, - uint32_t maxDescriptorSetInputAttachments_ = {}, - uint32_t maxVertexInputAttributes_ = {}, - uint32_t maxVertexInputBindings_ = {}, - uint32_t maxVertexInputAttributeOffset_ = {}, - uint32_t maxVertexInputBindingStride_ = {}, - uint32_t maxVertexOutputComponents_ = {}, - uint32_t maxTessellationGenerationLevel_ = {}, - uint32_t maxTessellationPatchSize_ = {}, - uint32_t maxTessellationControlPerVertexInputComponents_ = {}, - uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, - uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, - uint32_t maxTessellationControlTotalOutputComponents_ = {}, - uint32_t maxTessellationEvaluationInputComponents_ = {}, - uint32_t maxTessellationEvaluationOutputComponents_ = {}, - uint32_t maxGeometryShaderInvocations_ = {}, - uint32_t maxGeometryInputComponents_ = {}, 
- uint32_t maxGeometryOutputComponents_ = {}, - uint32_t maxGeometryOutputVertices_ = {}, - uint32_t maxGeometryTotalOutputComponents_ = {}, - uint32_t maxFragmentInputComponents_ = {}, - uint32_t maxFragmentOutputAttachments_ = {}, - uint32_t maxFragmentDualSrcAttachments_ = {}, - uint32_t maxFragmentCombinedOutputResources_ = {}, - uint32_t maxComputeSharedMemorySize_ = {}, - std::array const& maxComputeWorkGroupCount_ = {}, - uint32_t maxComputeWorkGroupInvocations_ = {}, - std::array const& maxComputeWorkGroupSize_ = {}, - uint32_t subPixelPrecisionBits_ = {}, - uint32_t subTexelPrecisionBits_ = {}, - uint32_t mipmapPrecisionBits_ = {}, - uint32_t maxDrawIndexedIndexValue_ = {}, - uint32_t maxDrawIndirectCount_ = {}, - float maxSamplerLodBias_ = {}, - float maxSamplerAnisotropy_ = {}, - uint32_t maxViewports_ = {}, - std::array const& maxViewportDimensions_ = {}, - std::array const& viewportBoundsRange_ = {}, - uint32_t viewportSubPixelBits_ = {}, - size_t minMemoryMapAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, - int32_t minTexelOffset_ = {}, - uint32_t maxTexelOffset_ = {}, - int32_t minTexelGatherOffset_ = {}, - uint32_t maxTexelGatherOffset_ = {}, - float minInterpolationOffset_ = {}, - float maxInterpolationOffset_ = {}, - uint32_t subPixelInterpolationOffsetBits_ = {}, - uint32_t maxFramebufferWidth_ = {}, - uint32_t maxFramebufferHeight_ = {}, - uint32_t maxFramebufferLayers_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, - uint32_t maxColorAttachments_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, - uint32_t maxSampleMaskWords_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, - float timestampPeriod_ = {}, - uint32_t maxClipDistances_ = {}, - uint32_t maxCullDistances_ = {}, - uint32_t maxCombinedClipAndCullDistances_ = {}, - uint32_t discreteQueuePriorities_ = {}, - std::array const& pointSizeRange_ = {}, - std::array const& lineWidthRange_ = {}, - float pointSizeGranularity_ = {}, - float lineWidthGranularity_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT - : maxImageDimension1D( maxImageDimension1D_ ) - , maxImageDimension2D( maxImageDimension2D_ ) - , maxImageDimension3D( maxImageDimension3D_ ) - , maxImageDimensionCube( maxImageDimensionCube_ ) - , maxImageArrayLayers( maxImageArrayLayers_ ) - , maxTexelBufferElements( maxTexelBufferElements_ ) - , maxUniformBufferRange( maxUniformBufferRange_ ) - , maxStorageBufferRange( maxStorageBufferRange_ ) 
- , maxPushConstantsSize( maxPushConstantsSize_ ) - , maxMemoryAllocationCount( maxMemoryAllocationCount_ ) - , maxSamplerAllocationCount( maxSamplerAllocationCount_ ) - , bufferImageGranularity( bufferImageGranularity_ ) - , sparseAddressSpaceSize( sparseAddressSpaceSize_ ) - , maxBoundDescriptorSets( maxBoundDescriptorSets_ ) - , maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ ) - , maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ ) - , maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ ) - , maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ ) - , maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ ) - , maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ ) - , maxPerStageResources( maxPerStageResources_ ) - , maxDescriptorSetSamplers( maxDescriptorSetSamplers_ ) - , maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ ) - , maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ ) - , maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ ) - , maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ ) - , maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ ) - , maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ ) - , maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ ) - , maxVertexInputAttributes( maxVertexInputAttributes_ ) - , maxVertexInputBindings( maxVertexInputBindings_ ) - , maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ ) - , maxVertexInputBindingStride( maxVertexInputBindingStride_ ) - , maxVertexOutputComponents( maxVertexOutputComponents_ ) - , maxTessellationGenerationLevel( maxTessellationGenerationLevel_ ) - , maxTessellationPatchSize( maxTessellationPatchSize_ ) - , maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ ) - , maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ ) - , maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ ) - , maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ ) - , maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ ) - , maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ ) - , maxGeometryShaderInvocations( maxGeometryShaderInvocations_ ) - , maxGeometryInputComponents( maxGeometryInputComponents_ ) - , maxGeometryOutputComponents( maxGeometryOutputComponents_ ) - , maxGeometryOutputVertices( maxGeometryOutputVertices_ ) - , maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ ) - , maxFragmentInputComponents( maxFragmentInputComponents_ ) - , maxFragmentOutputAttachments( maxFragmentOutputAttachments_ ) - , maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ ) - , maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ ) - , maxComputeSharedMemorySize( maxComputeSharedMemorySize_ ) - , maxComputeWorkGroupCount{} - , maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ ) - , maxComputeWorkGroupSize{} - , subPixelPrecisionBits( subPixelPrecisionBits_ ) - , subTexelPrecisionBits( subTexelPrecisionBits_ ) - , mipmapPrecisionBits( mipmapPrecisionBits_ ) - , maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ ) - , maxDrawIndirectCount( maxDrawIndirectCount_ ) - , maxSamplerLodBias( maxSamplerLodBias_ ) - , 
maxSamplerAnisotropy( maxSamplerAnisotropy_ ) - , maxViewports( maxViewports_ ) - , maxViewportDimensions{} - , viewportBoundsRange{} - , viewportSubPixelBits( viewportSubPixelBits_ ) - , minMemoryMapAlignment( minMemoryMapAlignment_ ) - , minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ ) - , minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ ) - , minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ ) - , minTexelOffset( minTexelOffset_ ) - , maxTexelOffset( maxTexelOffset_ ) - , minTexelGatherOffset( minTexelGatherOffset_ ) - , maxTexelGatherOffset( maxTexelGatherOffset_ ) - , minInterpolationOffset( minInterpolationOffset_ ) - , maxInterpolationOffset( maxInterpolationOffset_ ) - , subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ ) - , maxFramebufferWidth( maxFramebufferWidth_ ) - , maxFramebufferHeight( maxFramebufferHeight_ ) - , maxFramebufferLayers( maxFramebufferLayers_ ) - , framebufferColorSampleCounts( framebufferColorSampleCounts_ ) - , framebufferDepthSampleCounts( framebufferDepthSampleCounts_ ) - , framebufferStencilSampleCounts( framebufferStencilSampleCounts_ ) - , framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ ) - , maxColorAttachments( maxColorAttachments_ ) - , sampledImageColorSampleCounts( sampledImageColorSampleCounts_ ) - , sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ ) - , sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ ) - , sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ ) - , storageImageSampleCounts( storageImageSampleCounts_ ) - , maxSampleMaskWords( maxSampleMaskWords_ ) - , timestampComputeAndGraphics( timestampComputeAndGraphics_ ) - , timestampPeriod( timestampPeriod_ ) - , maxClipDistances( maxClipDistances_ ) - , maxCullDistances( maxCullDistances_ ) - , maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ ) - , discreteQueuePriorities( discreteQueuePriorities_ ) - , pointSizeRange{} - , lineWidthRange{} - , pointSizeGranularity( pointSizeGranularity_ ) - , lineWidthGranularity( lineWidthGranularity_ ) - , strictLines( strictLines_ ) - , standardSampleLocations( standardSampleLocations_ ) - , optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ ) - , optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ ) - , nonCoherentAtomSize( nonCoherentAtomSize_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( maxComputeWorkGroupCount, maxComputeWorkGroupCount_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( maxComputeWorkGroupSize, maxComputeWorkGroupSize_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( maxViewportDimensions, maxViewportDimensions_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( viewportBoundsRange, viewportBoundsRange_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( pointSizeRange, pointSizeRange_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( lineWidthRange, lineWidthRange_ ); - } - - PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceLimits& operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkPhysicalDeviceLimits const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( 
PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( maxImageDimension1D == rhs.maxImageDimension1D ) - && ( maxImageDimension2D == rhs.maxImageDimension2D ) - && ( maxImageDimension3D == rhs.maxImageDimension3D ) - && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) - && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) - && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) - && ( maxUniformBufferRange == rhs.maxUniformBufferRange ) - && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) - && ( maxPushConstantsSize == rhs.maxPushConstantsSize ) - && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) - && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) - && ( bufferImageGranularity == rhs.bufferImageGranularity ) - && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) - && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) - && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) - && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) - && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) - && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) - && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) - && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) - && ( maxPerStageResources == rhs.maxPerStageResources ) - && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) - && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) - && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) - && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) - && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) - && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) - && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) - && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) - && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) - && ( maxVertexInputBindings == rhs.maxVertexInputBindings ) - && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) - && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) - && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) - && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) - && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) - && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) - && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) - && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) - && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) - && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) - && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) - && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) - && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) - && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) - && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) - && ( 
maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) - && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) - && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) - && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) - && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) - && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) - && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 ) - && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) - && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ) - && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) - && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) - && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) - && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) - && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) - && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) - && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) - && ( maxViewports == rhs.maxViewports ) - && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 ) - && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 ) - && ( viewportSubPixelBits == rhs.viewportSubPixelBits ) - && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) - && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) - && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) - && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) - && ( minTexelOffset == rhs.minTexelOffset ) - && ( maxTexelOffset == rhs.maxTexelOffset ) - && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) - && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) - && ( minInterpolationOffset == rhs.minInterpolationOffset ) - && ( maxInterpolationOffset == rhs.maxInterpolationOffset ) - && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) - && ( maxFramebufferWidth == rhs.maxFramebufferWidth ) - && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) - && ( maxFramebufferLayers == rhs.maxFramebufferLayers ) - && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) - && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) - && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) - && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) - && ( maxColorAttachments == rhs.maxColorAttachments ) - && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) - && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) - && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) - && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) - && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) - && ( maxSampleMaskWords == rhs.maxSampleMaskWords ) - && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) - && ( timestampPeriod == rhs.timestampPeriod ) - && ( maxClipDistances == rhs.maxClipDistances ) - && ( maxCullDistances == rhs.maxCullDistances ) - && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) - && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) - && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 
0 ) - && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 ) - && ( pointSizeGranularity == rhs.pointSizeGranularity ) - && ( lineWidthGranularity == rhs.lineWidthGranularity ) - && ( strictLines == rhs.strictLines ) - && ( standardSampleLocations == rhs.standardSampleLocations ) - && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) - && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) - && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); - } - - bool operator!=( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - uint32_t maxImageDimension1D = {}; - uint32_t maxImageDimension2D = {}; - uint32_t maxImageDimension3D = {}; - uint32_t maxImageDimensionCube = {}; - uint32_t maxImageArrayLayers = {}; - uint32_t maxTexelBufferElements = {}; - uint32_t maxUniformBufferRange = {}; - uint32_t maxStorageBufferRange = {}; - uint32_t maxPushConstantsSize = {}; - uint32_t maxMemoryAllocationCount = {}; - uint32_t maxSamplerAllocationCount = {}; - VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {}; - uint32_t maxBoundDescriptorSets = {}; - uint32_t maxPerStageDescriptorSamplers = {}; - uint32_t maxPerStageDescriptorUniformBuffers = {}; - uint32_t maxPerStageDescriptorStorageBuffers = {}; - uint32_t maxPerStageDescriptorSampledImages = {}; - uint32_t maxPerStageDescriptorStorageImages = {}; - uint32_t maxPerStageDescriptorInputAttachments = {}; - uint32_t maxPerStageResources = {}; - uint32_t maxDescriptorSetSamplers = {}; - uint32_t maxDescriptorSetUniformBuffers = {}; - uint32_t maxDescriptorSetUniformBuffersDynamic = {}; - uint32_t maxDescriptorSetStorageBuffers = {}; - uint32_t maxDescriptorSetStorageBuffersDynamic = {}; - uint32_t maxDescriptorSetSampledImages = {}; - uint32_t maxDescriptorSetStorageImages = {}; - uint32_t maxDescriptorSetInputAttachments = {}; - uint32_t maxVertexInputAttributes = {}; - uint32_t maxVertexInputBindings = {}; - uint32_t maxVertexInputAttributeOffset = {}; - uint32_t maxVertexInputBindingStride = {}; - uint32_t maxVertexOutputComponents = {}; - uint32_t maxTessellationGenerationLevel = {}; - uint32_t maxTessellationPatchSize = {}; - uint32_t maxTessellationControlPerVertexInputComponents = {}; - uint32_t maxTessellationControlPerVertexOutputComponents = {}; - uint32_t maxTessellationControlPerPatchOutputComponents = {}; - uint32_t maxTessellationControlTotalOutputComponents = {}; - uint32_t maxTessellationEvaluationInputComponents = {}; - uint32_t maxTessellationEvaluationOutputComponents = {}; - uint32_t maxGeometryShaderInvocations = {}; - uint32_t maxGeometryInputComponents = {}; - uint32_t maxGeometryOutputComponents = {}; - uint32_t maxGeometryOutputVertices = {}; - uint32_t maxGeometryTotalOutputComponents = {}; - uint32_t maxFragmentInputComponents = {}; - uint32_t maxFragmentOutputAttachments = {}; - uint32_t maxFragmentDualSrcAttachments = {}; - uint32_t maxFragmentCombinedOutputResources = {}; - uint32_t maxComputeSharedMemorySize = {}; - uint32_t maxComputeWorkGroupCount[3] = {}; - uint32_t maxComputeWorkGroupInvocations = {}; - uint32_t maxComputeWorkGroupSize[3] = {}; - uint32_t subPixelPrecisionBits = {}; - uint32_t subTexelPrecisionBits = {}; - uint32_t mipmapPrecisionBits = {}; - uint32_t maxDrawIndexedIndexValue = {}; - uint32_t maxDrawIndirectCount = {}; - float maxSamplerLodBias = {}; - float maxSamplerAnisotropy = {}; - uint32_t 
maxViewports = {}; - uint32_t maxViewportDimensions[2] = {}; - float viewportBoundsRange[2] = {}; - uint32_t viewportSubPixelBits = {}; - size_t minMemoryMapAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {}; - int32_t minTexelOffset = {}; - uint32_t maxTexelOffset = {}; - int32_t minTexelGatherOffset = {}; - uint32_t maxTexelGatherOffset = {}; - float minInterpolationOffset = {}; - float maxInterpolationOffset = {}; - uint32_t subPixelInterpolationOffsetBits = {}; - uint32_t maxFramebufferWidth = {}; - uint32_t maxFramebufferHeight = {}; - uint32_t maxFramebufferLayers = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {}; - uint32_t maxColorAttachments = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {}; - uint32_t maxSampleMaskWords = {}; - VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {}; - float timestampPeriod = {}; - uint32_t maxClipDistances = {}; - uint32_t maxCullDistances = {}; - uint32_t maxCombinedClipAndCullDistances = {}; - uint32_t discreteQueuePriorities = {}; - float pointSizeRange[2] = {}; - float lineWidthRange[2] = {}; - float pointSizeGranularity = {}; - float lineWidthGranularity = {}; - VULKAN_HPP_NAMESPACE::Bool32 strictLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {}; - VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {}; + using Type = PhysicalDeviceInlineUniformBlockPropertiesEXT; }; - static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceLineRasterizationFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {} ) VULKAN_HPP_NOEXCEPT - : rectangularLines( rectangularLines_ ) - , bresenhamLines( bresenhamLines_ ) - , smoothLines( smoothLines_ ) - , stippledRectangularLines( stippledRectangularLines_ ) - , stippledBresenhamLines( stippledBresenhamLines_ ) - , stippledSmoothLines( stippledSmoothLines_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}) VULKAN_HPP_NOEXCEPT + : rectangularLines( rectangularLines_ ), bresenhamLines( bresenhamLines_ ), smoothLines( smoothLines_ ), stippledRectangularLines( stippledRectangularLines_ ), stippledBresenhamLines( stippledBresenhamLines_ ), stippledSmoothLines( stippledSmoothLines_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT ) - offsetof( PhysicalDeviceLineRasterizationFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceLineRasterizationFeaturesEXT& operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) ); return *this; } @@ -45834,6 +65329,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceLineRasterizationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -45844,6 +65340,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == 
rhs.sType ) @@ -45860,6 +65360,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; @@ -45870,33 +65373,48 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; + }; static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceLineRasterizationFeaturesEXT; + }; + struct PhysicalDeviceLineRasterizationPropertiesEXT { - PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT - : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT(uint32_t lineSubPixelPrecisionBits_ = {}) VULKAN_HPP_NOEXCEPT + : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceLineRasterizationPropertiesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceLineRasterizationPropertiesEXT& operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceLineRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -45907,6 +65425,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -45918,40 +65440,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; void* pNext = {}; uint32_t lineSubPixelPrecisionBits = {}; + }; static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceLineRasterizationPropertiesEXT; + }; + struct PhysicalDeviceMaintenance3Properties { - PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT - : maxPerSetDescriptors( maxPerSetDescriptors_ ) - , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties(uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}) VULKAN_HPP_NOEXCEPT + : maxPerSetDescriptors( maxPerSetDescriptors_ ), maxMemoryAllocationSize( maxMemoryAllocationSize_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties ) - offsetof( PhysicalDeviceMaintenance3Properties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceMaintenance3Properties& operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceMaintenance3Properties ) ); + return *this; + } + + operator VkPhysicalDeviceMaintenance3Properties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -45962,6 +65500,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceMaintenance3Properties const& ) const = default; +#else bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -45974,44 +65516,58 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; void* pNext = {}; uint32_t maxPerSetDescriptors = {}; VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; + }; static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" 
); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance3Properties; + }; + using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; + struct PhysicalDeviceMemoryBudgetPropertiesEXT { - PhysicalDeviceMemoryBudgetPropertiesEXT( std::array const& heapBudget_ = {}, - std::array const& heapUsage_ = {} ) VULKAN_HPP_NOEXCEPT - : heapBudget{} - , heapUsage{} - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( heapBudget, heapBudget_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( heapUsage, heapUsage_ ); - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) - offsetof( PhysicalDeviceMemoryBudgetPropertiesEXT, pNext ) ); - return *this; - } +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(std::array const& heapBudget_ = {}, std::array const& heapUsage_ = {}) VULKAN_HPP_NOEXCEPT + : heapBudget( heapBudget_ ), heapUsage( heapUsage_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceMemoryBudgetPropertiesEXT& operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -46022,48 +65578,69 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( memcmp( heapBudget, rhs.heapBudget, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) ) == 0 ) - && ( memcmp( heapUsage, rhs.heapUsage, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) ) == 0 ); + && ( heapBudget == rhs.heapBudget ) + && ( heapUsage == rhs.heapUsage ); } bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize 
heapBudget[VK_MAX_MEMORY_HEAPS] = {}; - VULKAN_HPP_NAMESPACE::DeviceSize heapUsage[VK_MAX_MEMORY_HEAPS] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapBudget = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapUsage = {}; + }; static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryBudgetPropertiesEXT; + }; + struct PhysicalDeviceMemoryPriorityFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryPriority( memoryPriority_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {}) VULKAN_HPP_NOEXCEPT + : memoryPriority( memoryPriority_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT ) - offsetof( PhysicalDeviceMemoryPriorityFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceMemoryPriorityFeaturesEXT& operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) ); return *this; } @@ -46079,6 +65656,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -46089,6 +65667,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -46100,148 +65682,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {}; + }; static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" 
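  // Usage sketch (illustrative only, assuming VK_EXT_memory_budget is enabled):
  // the struct above is typically filled through a structure chain, and its
  // heapBudget / heapUsage members now behave like std::array thanks to
  // ArrayWrapper1D (indexing and operator== instead of memcmp):
  //
  //   auto chain = physicalDevice.getMemoryProperties2<
  //                    vk::PhysicalDeviceMemoryProperties2,
  //                    vk::PhysicalDeviceMemoryBudgetPropertiesEXT>();
  //   auto const & budget = chain.get<vk::PhysicalDeviceMemoryBudgetPropertiesEXT>();
  //   vk::DeviceSize heap0Budget = budget.heapBudget[0];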
); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct PhysicalDeviceMemoryProperties + template <> + struct CppType { - PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {}, - std::array const& memoryTypes_ = {}, - uint32_t memoryHeapCount_ = {}, - std::array const& memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryTypeCount( memoryTypeCount_ ) - , memoryTypes{} - , memoryHeapCount( memoryHeapCount_ ) - , memoryHeaps{} - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( memoryTypes, memoryTypes_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( memoryHeaps, memoryHeaps_ ); - } - - PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceMemoryProperties& operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkPhysicalDeviceMemoryProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( memoryTypeCount == rhs.memoryTypeCount ) - && ( memcmp( memoryTypes, rhs.memoryTypes, std::min( VK_MAX_MEMORY_TYPES, memoryTypeCount ) * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ) == 0 ) - && ( memoryHeapCount == rhs.memoryHeapCount ) - && ( memcmp( memoryHeaps, rhs.memoryHeaps, std::min( VK_MAX_MEMORY_HEAPS, memoryHeapCount ) * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ) == 0 ); - } - - bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - uint32_t memoryTypeCount = {}; - VULKAN_HPP_NAMESPACE::MemoryType memoryTypes[VK_MAX_MEMORY_TYPES] = {}; - uint32_t memoryHeapCount = {}; - VULKAN_HPP_NAMESPACE::MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS] = {}; + using Type = PhysicalDeviceMemoryPriorityFeaturesEXT; }; - static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PhysicalDeviceMemoryProperties2 - { - PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryProperties( memoryProperties_ ) - {} - - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 ) - offsetof( PhysicalDeviceMemoryProperties2, pNext ) ); - return *this; - } - - PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceMemoryProperties2& operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkPhysicalDeviceMemoryProperties2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryProperties == rhs.memoryProperties ); - } - - bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {}; - }; - static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceMeshShaderFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT - : taskShader( taskShader_ ) - , meshShader( meshShader_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}) VULKAN_HPP_NOEXCEPT + : taskShader( taskShader_ ), meshShader( meshShader_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) - offsetof( PhysicalDeviceMeshShaderFeaturesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceMeshShaderFeaturesNV ) ); return *this; } @@ -46263,6 +65749,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceMeshShaderFeaturesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -46273,6 +65760,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const& ) const = default; +#else bool operator==( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -46285,66 +65776,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; + }; static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
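  // Usage sketch (illustrative only, assuming VK_NV_mesh_shader is available):
  // feature structs like the one above are usually queried through a
  // getFeatures2 structure chain and then hooked into DeviceCreateInfo::pNext
  // when the device is created:
  //
  //   auto features = physicalDevice.getFeatures2<
  //                       vk::PhysicalDeviceFeatures2,
  //                       vk::PhysicalDeviceMeshShaderFeaturesNV>();
  //   if ( features.get<vk::PhysicalDeviceMeshShaderFeaturesNV>().meshShader )
  //   {
  //     // chain a PhysicalDeviceMeshShaderFeaturesNV with the desired members
  //     // set into vk::DeviceCreateInfo::pNext before createDevice()
  //   }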
); + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderFeaturesNV; + }; + struct PhysicalDeviceMeshShaderPropertiesNV { - PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {}, - uint32_t maxTaskWorkGroupInvocations_ = {}, - std::array const& maxTaskWorkGroupSize_ = {}, - uint32_t maxTaskTotalMemorySize_ = {}, - uint32_t maxTaskOutputCount_ = {}, - uint32_t maxMeshWorkGroupInvocations_ = {}, - std::array const& maxMeshWorkGroupSize_ = {}, - uint32_t maxMeshTotalMemorySize_ = {}, - uint32_t maxMeshOutputVertices_ = {}, - uint32_t maxMeshOutputPrimitives_ = {}, - uint32_t maxMeshMultiviewViewCount_ = {}, - uint32_t meshOutputPerVertexGranularity_ = {}, - uint32_t meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT - : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ) - , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ) - , maxTaskWorkGroupSize{} - , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ) - , maxTaskOutputCount( maxTaskOutputCount_ ) - , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ) - , maxMeshWorkGroupSize{} - , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ) - , maxMeshOutputVertices( maxMeshOutputVertices_ ) - , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ) - , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ) - , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ) - , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( maxTaskWorkGroupSize, maxTaskWorkGroupSize_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( maxMeshWorkGroupSize, maxMeshWorkGroupSize_ ); - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; - VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV ) - offsetof( PhysicalDeviceMeshShaderPropertiesNV, pNext ) ); - return *this; - } +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV(uint32_t maxDrawMeshTasksCount_ = {}, uint32_t maxTaskWorkGroupInvocations_ = {}, std::array const& maxTaskWorkGroupSize_ = {}, uint32_t maxTaskTotalMemorySize_ = {}, uint32_t maxTaskOutputCount_ = {}, uint32_t maxMeshWorkGroupInvocations_ = {}, std::array const& maxMeshWorkGroupSize_ = {}, uint32_t maxMeshTotalMemorySize_ = {}, uint32_t maxMeshOutputVertices_ = {}, uint32_t maxMeshOutputPrimitives_ = {}, uint32_t maxMeshMultiviewViewCount_ = {}, uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}) VULKAN_HPP_NOEXCEPT + : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ), maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ), maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ), maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ), maxTaskOutputCount( maxTaskOutputCount_ ), maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ), maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ), maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ), maxMeshOutputVertices( maxMeshOutputVertices_ ), maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ), maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ), meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ), 
meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceMeshShaderPropertiesNV& operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceMeshShaderPropertiesNV ) ); + return *this; + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -46355,17 +65837,21 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const& ) const = default; +#else bool operator==( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) - && ( memcmp( maxTaskWorkGroupSize, rhs.maxTaskWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ) + && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) && ( maxTaskOutputCount == rhs.maxTaskOutputCount ) && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) - && ( memcmp( maxMeshWorkGroupSize, rhs.maxMeshWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ) + && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) @@ -46378,51 +65864,64 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; void* pNext = {}; uint32_t maxDrawMeshTasksCount = {}; uint32_t maxTaskWorkGroupInvocations = {}; - uint32_t maxTaskWorkGroupSize[3] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; uint32_t maxTaskTotalMemorySize = {}; uint32_t maxTaskOutputCount = {}; uint32_t maxMeshWorkGroupInvocations = {}; - uint32_t maxMeshWorkGroupSize[3] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; uint32_t maxMeshTotalMemorySize = {}; uint32_t maxMeshOutputVertices = {}; uint32_t maxMeshOutputPrimitives = {}; uint32_t maxMeshMultiviewViewCount = {}; uint32_t meshOutputPerVertexGranularity = {}; uint32_t meshOutputPerPrimitiveGranularity = {}; + }; static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderPropertiesNV; + }; + struct PhysicalDeviceMultiviewFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT - : multiview( multiview_ ) - , multiviewGeometryShader( multiviewGeometryShader_ ) - , multiviewTessellationShader( multiviewTessellationShader_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures(VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}) VULKAN_HPP_NOEXCEPT + : multiview( multiview_ ), multiviewGeometryShader( multiviewGeometryShader_ ), multiviewTessellationShader( multiviewTessellationShader_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures ) - offsetof( PhysicalDeviceMultiviewFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceMultiviewFeatures ) ); return *this; } @@ -46450,6 +65949,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceMultiviewFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -46460,6 +65960,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceMultiviewFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -46473,6 +65977,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; @@ -46480,33 +65987,49 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + }; static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewFeatures; + }; + using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; + struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { - PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT - : perViewPositionAllComponents( perViewPositionAllComponents_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}) VULKAN_HPP_NOEXCEPT + : perViewPositionAllComponents( perViewPositionAllComponents_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) - offsetof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) ); + return *this; + } + + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -46517,6 +66040,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& ) const = default; +#else bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -46528,40 +66055,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {}; + }; static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + }; + struct PhysicalDeviceMultiviewProperties { - PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {}, - uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : maxMultiviewViewCount( maxMultiviewViewCount_ ) - , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties(uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}) VULKAN_HPP_NOEXCEPT + : maxMultiviewViewCount( maxMultiviewViewCount_ ), maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties ) - offsetof( PhysicalDeviceMultiviewProperties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceMultiviewProperties& operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceMultiviewProperties ) ); + return *this; + } + + operator VkPhysicalDeviceMultiviewProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -46572,6 +66115,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceMultiviewProperties const& ) const = default; +#else bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -46584,45 +66131,58 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; void* pNext = {}; uint32_t maxMultiviewViewCount = {}; uint32_t maxMultiviewInstanceIndex = {}; + }; static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewProperties; + }; + using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; + struct PhysicalDevicePCIBusInfoPropertiesEXT { - PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {}, - uint32_t pciBus_ = {}, - uint32_t pciDevice_ = {}, - uint32_t pciFunction_ = {} ) VULKAN_HPP_NOEXCEPT - : pciDomain( pciDomain_ ) - , pciBus( pciBus_ ) - , pciDevice( pciDevice_ ) - , pciFunction( pciFunction_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT(uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}) VULKAN_HPP_NOEXCEPT + : pciDomain( pciDomain_ ), pciBus( pciBus_ ), pciDevice( pciDevice_ ), pciFunction( pciFunction_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT ) - offsetof( PhysicalDevicePCIBusInfoPropertiesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDevicePCIBusInfoPropertiesEXT& operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDevicePCIBusInfoPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -46633,6 +66193,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -46647,6 +66211,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; @@ -46655,32 +66222,44 @@ namespace VULKAN_HPP_NAMESPACE uint32_t pciBus = {}; uint32_t pciDevice = {}; uint32_t pciFunction = {}; + }; static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDevicePCIBusInfoPropertiesEXT; + }; + struct PhysicalDevicePerformanceQueryFeaturesKHR { - VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {} ) VULKAN_HPP_NOEXCEPT - : performanceCounterQueryPools( performanceCounterQueryPools_ ) - , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}) VULKAN_HPP_NOEXCEPT + : performanceCounterQueryPools( performanceCounterQueryPools_ ), performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR ) - offsetof( PhysicalDevicePerformanceQueryFeaturesKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDevicePerformanceQueryFeaturesKHR& operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) ); return *this; } @@ -46702,6 +66281,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDevicePerformanceQueryFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -46712,6 +66292,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const& ) const = default; +#else bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -46724,39 +66308,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {}; VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {}; + }; static_assert( sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), "struct and wrapper have different 
size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDevicePerformanceQueryFeaturesKHR; + }; + struct PhysicalDevicePerformanceQueryPropertiesKHR { - PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {} ) VULKAN_HPP_NOEXCEPT - : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}) VULKAN_HPP_NOEXCEPT + : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR ) - offsetof( PhysicalDevicePerformanceQueryPropertiesKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDevicePerformanceQueryPropertiesKHR& operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) ); + return *this; + } + + operator VkPhysicalDevicePerformanceQueryPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -46767,6 +66369,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const& ) const = default; +#else bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -46778,35 +66384,139 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {}; + }; static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT - : pipelineExecutableInfo( pipelineExecutableInfo_ ) + using Type = PhysicalDevicePerformanceQueryPropertiesKHR; + }; + + struct PhysicalDevicePipelineCreationCacheControlFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}) VULKAN_HPP_NOEXCEPT + : pipelineCreationCacheControl( pipelineCreationCacheControl_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) - offsetof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & operator=( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & operator=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) ); + return *this; + } + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCreationCacheControl = pipelineCreationCacheControl_; + return *this; + } + + + operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ); + } + + bool operator!=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; + + }; + static_assert( sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) == sizeof( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineCreationCacheControlFeaturesEXT; + }; + + struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}) VULKAN_HPP_NOEXCEPT + : pipelineExecutableInfo( pipelineExecutableInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) ); return *this; } @@ -46822,6 +66532,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -46832,6 +66543,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& ) const = default; +#else bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -46843,38 +66558,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {}; + }; static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
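  // Usage sketch (illustrative only): like the other feature structs added in
  // this revision, PhysicalDevicePipelineCreationCacheControlFeaturesEXT above
  // exposes fluent set*() members that return *this, so a pNext chain can be
  // assembled concisely:
  //
  //   vk::PhysicalDevicePipelineCreationCacheControlFeaturesEXT cacheControl;
  //   cacheControl.setPipelineCreationCacheControl( VK_TRUE );
  //   // then point DeviceCreateInfo::pNext (or a later chain element) at
  //   // &cacheControl before creating the device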
); + template <> + struct CppType + { + using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + }; + struct PhysicalDevicePointClippingProperties { - PhysicalDevicePointClippingProperties( VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT - : pointClippingBehavior( pointClippingBehavior_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties(VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes) VULKAN_HPP_NOEXCEPT + : pointClippingBehavior( pointClippingBehavior_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties ) - offsetof( PhysicalDevicePointClippingProperties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDevicePointClippingProperties& operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDevicePointClippingProperties ) ); + return *this; + } + + operator VkPhysicalDevicePointClippingProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -46885,6 +66618,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevicePointClippingProperties const& ) const = default; +#else bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -46896,225 +66633,430 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; void* pNext = {}; VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + }; static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct PhysicalDeviceSparseProperties + template <> + struct CppType { - PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT - : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ ) - , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ ) - , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ ) - , residencyAlignedMipSize( residencyAlignedMipSize_ ) - , residencyNonResidentStrict( residencyNonResidentStrict_ ) + using Type = PhysicalDevicePointClippingProperties; + }; + using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct PhysicalDevicePortabilitySubsetFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}) VULKAN_HPP_NOEXCEPT + : constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ ), events( events_ ), imageViewFormatReinterpretation( imageViewFormatReinterpretation_ ), imageViewFormatSwizzle( imageViewFormatSwizzle_ ), imageView2DOn3DImage( imageView2DOn3DImage_ ), multisampleArrayImage( multisampleArrayImage_ ), mutableComparisonSamplers( mutableComparisonSamplers_ ), pointPolygons( pointPolygons_ ), samplerMipLodBias( samplerMipLodBias_ ), separateStencilMaskRef( separateStencilMaskRef_ ), shaderSampleRateInterpolationFunctions( shaderSampleRateInterpolationFunctions_ ), tessellationIsolines( tessellationIsolines_ ), tessellationPointMode( tessellationPointMode_ ), triangleFans( triangleFans_ ), vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ ) {} - PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceSparseProperties& operator=( 
VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceSparseProperties const&() const VULKAN_HPP_NOEXCEPT + PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) - && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) - && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) - && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) - && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict ); - } - - bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {}; - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {}; - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {}; - VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {}; - VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {}; - }; - static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PhysicalDeviceProperties - { - PhysicalDeviceProperties( uint32_t apiVersion_ = {}, - uint32_t driverVersion_ = {}, - uint32_t vendorID_ = {}, - uint32_t deviceID_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, - std::array const& deviceName_ = {}, - std::array const& pipelineCacheUUID_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : apiVersion( apiVersion_ ) - , driverVersion( driverVersion_ ) - , vendorID( vendorID_ ) - , deviceID( deviceID_ ) - , deviceType( deviceType_ ) - , deviceName{} - , pipelineCacheUUID{} - , limits( limits_ ) - , sparseProperties( sparseProperties_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( deviceName, deviceName_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( pipelineCacheUUID, pipelineCacheUUID_ ); - } - - PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceProperties& operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDevicePortabilitySubsetFeaturesKHR ) ); return *this; } - operator VkPhysicalDeviceProperties const&() const VULKAN_HPP_NOEXCEPT + PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( apiVersion == rhs.apiVersion ) - && ( driverVersion == rhs.driverVersion ) - && ( vendorID == rhs.vendorID ) - && ( deviceID == rhs.deviceID ) - && ( deviceType == rhs.deviceType ) - && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 ) - && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) - && ( limits == rhs.limits ) - && ( sparseProperties == rhs.sparseProperties ); - } - - bool operator!=( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - uint32_t apiVersion = {}; - uint32_t driverVersion = {}; - uint32_t vendorID = {}; - uint32_t deviceID = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther; - char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE] = {}; - uint8_t pipelineCacheUUID[VK_UUID_SIZE] = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {}; - }; - static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PhysicalDeviceProperties2 - { - PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT - : properties( properties_ ) - {} - - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 ) - offsetof( PhysicalDeviceProperties2, pNext ) ); + pNext = pNext_; return *this; } - PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT { - *this = rhs; - } - - PhysicalDeviceProperties2& operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); + constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_; return *this; } - operator VkPhysicalDeviceProperties2 const&() const VULKAN_HPP_NOEXCEPT + PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + events = events_; + return *this; } - operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT + PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageViewFormatReinterpretation = imageViewFormatReinterpretation_; + return *this; } - bool operator==( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT + { + imageViewFormatSwizzle = imageViewFormatSwizzle_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT + { + imageView2DOn3DImage = imageView2DOn3DImage_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT + { + multisampleArrayImage = multisampleArrayImage_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT + { + mutableComparisonSamplers = mutableComparisonSamplers_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT + { + pointPolygons = pointPolygons_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT + { + samplerMipLodBias = samplerMipLodBias_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT + { + separateStencilMaskRef = separateStencilMaskRef_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions( VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_; + return *this; + } + + 
PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT + { + tessellationIsolines = tessellationIsolines_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT + { + tessellationPointMode = tessellationPointMode_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT + { + triangleFans = triangleFans_; + return *this; + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_; + return *this; + } + + + operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const& ) const = default; +#else + bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( properties == rhs.properties ); + && ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) + && ( events == rhs.events ) + && ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) + && ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) + && ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) + && ( multisampleArrayImage == rhs.multisampleArrayImage ) + && ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) + && ( pointPolygons == rhs.pointPolygons ) + && ( samplerMipLodBias == rhs.samplerMipLodBias ) + && ( separateStencilMaskRef == rhs.separateStencilMaskRef ) + && ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) + && ( tessellationIsolines == rhs.tessellationIsolines ) + && ( tessellationPointMode == rhs.tessellationPointMode ) + && ( triangleFans == rhs.triangleFans ) + && ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride ); } - bool operator!=( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; void* pNext = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {}; + VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {}; + VULKAN_HPP_NAMESPACE::Bool32 events = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {}; + VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = 
{}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {}; + + }; + static_assert( sizeof( PhysicalDevicePortabilitySubsetFeaturesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDevicePortabilitySubsetFeaturesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct PhysicalDevicePortabilitySubsetPropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(uint32_t minVertexInputBindingStrideAlignment_ = {}) VULKAN_HPP_NOEXCEPT + : minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDevicePortabilitySubsetPropertiesKHR ) ); + return *this; + } + + PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDevicePortabilitySubsetPropertiesKHR & setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT + { + minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_; + return *this; + } + + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const& ) const = default; +#else + bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment ); + } + + bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; + void* pNext = {}; + uint32_t minVertexInputBindingStrideAlignment = 
{};
+
+  };
+  static_assert( sizeof( PhysicalDevicePortabilitySubsetPropertiesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePortabilitySubsetPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
+  {
+    using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  struct PhysicalDevicePrivateDataFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}) VULKAN_HPP_NOEXCEPT
+    : privateData( privateData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeaturesEXT( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePrivateDataFeaturesEXT( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDevicePrivateDataFeaturesEXT & operator=( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePrivateDataFeaturesEXT & operator=( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( PhysicalDevicePrivateDataFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDevicePrivateDataFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevicePrivateDataFeaturesEXT & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      privateData = privateData_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePrivateDataFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePrivateDataFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePrivateDataFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePrivateDataFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePrivateDataFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( privateData == rhs.privateData );
+    }
+
+    bool operator!=( PhysicalDevicePrivateDataFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
+
+  };
+  static_assert( sizeof( PhysicalDevicePrivateDataFeaturesEXT ) == sizeof( VkPhysicalDevicePrivateDataFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePrivateDataFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeaturesEXT>
+  {
+    using Type = PhysicalDevicePrivateDataFeaturesEXT;
   };
-  static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!"
); struct PhysicalDeviceProtectedMemoryFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT - : protectedMemory( protectedMemory_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures(VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}) VULKAN_HPP_NOEXCEPT + : protectedMemory( protectedMemory_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures ) - offsetof( PhysicalDeviceProtectedMemoryFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceProtectedMemoryFeatures ) ); return *this; } @@ -47130,6 +67072,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceProtectedMemoryFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47140,6 +67083,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -47151,38 +67098,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; + }; static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceProtectedMemoryFeatures; + }; + struct PhysicalDeviceProtectedMemoryProperties { - PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT - : protectedNoFault( protectedNoFault_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties(VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}) VULKAN_HPP_NOEXCEPT + : protectedNoFault( protectedNoFault_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties ) - offsetof( PhysicalDeviceProtectedMemoryProperties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceProtectedMemoryProperties ) ); + return *this; + } + + operator VkPhysicalDeviceProtectedMemoryProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47193,6 +67158,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceProtectedMemoryProperties const& ) const = default; +#else bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -47204,38 +67173,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; + }; static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceProtectedMemoryProperties; + }; + struct PhysicalDevicePushDescriptorPropertiesKHR { - PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT - : maxPushDescriptors( maxPushDescriptors_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR(uint32_t maxPushDescriptors_ = {}) VULKAN_HPP_NOEXCEPT + : maxPushDescriptors( maxPushDescriptors_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) - offsetof( PhysicalDevicePushDescriptorPropertiesKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePushDescriptorPropertiesKHR & operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) ); + return *this; + } + + operator VkPhysicalDevicePushDescriptorPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47246,6 +67233,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const& ) const = default; +#else bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -47257,52 +67248,302 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; void* pNext = {}; uint32_t maxPushDescriptors = {}; + }; static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct PhysicalDeviceRayTracingPropertiesNV + template <> + struct CppType { - PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {}, - uint32_t maxRecursionDepth_ = {}, - uint32_t maxShaderGroupStride_ = {}, - uint32_t shaderGroupBaseAlignment_ = {}, - uint64_t maxGeometryCount_ = {}, - uint64_t maxInstanceCount_ = {}, - uint64_t maxTriangleCount_ = {}, - uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderGroupHandleSize( shaderGroupHandleSize_ ) - , maxRecursionDepth( maxRecursionDepth_ ) - , maxShaderGroupStride( maxShaderGroupStride_ ) - , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) - , maxGeometryCount( maxGeometryCount_ ) - , maxInstanceCount( maxInstanceCount_ ) - , maxTriangleCount( maxTriangleCount_ ) - , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) + using Type = PhysicalDevicePushDescriptorPropertiesKHR; + }; + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct PhysicalDeviceRayTracingFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayTracing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ = {}) VULKAN_HPP_NOEXCEPT + : rayTracing( rayTracing_ ), rayTracingShaderGroupHandleCaptureReplay( rayTracingShaderGroupHandleCaptureReplay_ ), rayTracingShaderGroupHandleCaptureReplayMixed( rayTracingShaderGroupHandleCaptureReplayMixed_ ), rayTracingAccelerationStructureCaptureReplay( rayTracingAccelerationStructureCaptureReplay_ ), rayTracingIndirectTraceRays( rayTracingIndirectTraceRays_ ), rayTracingIndirectAccelerationStructureBuild( rayTracingIndirectAccelerationStructureBuild_ ), rayTracingHostAccelerationStructureCommands( rayTracingHostAccelerationStructureCommands_ ), rayQuery( rayQuery_ ), rayTracingPrimitiveCulling( rayTracingPrimitiveCulling_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingFeaturesKHR( PhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingFeaturesKHR( VkPhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV ) - offsetof( PhysicalDeviceRayTracingPropertiesNV, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceRayTracingFeaturesKHR & operator=( VkPhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceRayTracingFeaturesKHR & operator=( PhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + 
memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceRayTracingFeaturesKHR ) ); + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracing( VULKAN_HPP_NAMESPACE::Bool32 rayTracing_ ) VULKAN_HPP_NOEXCEPT + { + rayTracing = rayTracing_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingShaderGroupHandleCaptureReplay = rayTracingShaderGroupHandleCaptureReplay_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplayMixed( VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingShaderGroupHandleCaptureReplayMixed = rayTracingShaderGroupHandleCaptureReplayMixed_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingAccelerationStructureCaptureReplay = rayTracingAccelerationStructureCaptureReplay_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectTraceRays( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingIndirectTraceRays = rayTracingIndirectTraceRays_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectAccelerationStructureBuild( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingIndirectAccelerationStructureBuild = rayTracingIndirectAccelerationStructureBuild_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingHostAccelerationStructureCommands( VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingHostAccelerationStructureCommands = rayTracingHostAccelerationStructureCommands_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT + { + rayQuery = rayQuery_; + return *this; + } + + PhysicalDeviceRayTracingFeaturesKHR & setRayTracingPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPrimitiveCulling = rayTracingPrimitiveCulling_; + return *this; + } + + + operator VkPhysicalDeviceRayTracingFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceRayTracingFeaturesKHR const& ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( rayTracing == rhs.rayTracing ) + && ( rayTracingShaderGroupHandleCaptureReplay == rhs.rayTracingShaderGroupHandleCaptureReplay ) + && ( rayTracingShaderGroupHandleCaptureReplayMixed == rhs.rayTracingShaderGroupHandleCaptureReplayMixed ) + && ( rayTracingAccelerationStructureCaptureReplay == rhs.rayTracingAccelerationStructureCaptureReplay ) + && ( rayTracingIndirectTraceRays == rhs.rayTracingIndirectTraceRays ) + && ( 
rayTracingIndirectAccelerationStructureBuild == rhs.rayTracingIndirectAccelerationStructureBuild ) + && ( rayTracingHostAccelerationStructureCommands == rhs.rayTracingHostAccelerationStructureCommands ) + && ( rayQuery == rhs.rayQuery ) + && ( rayTracingPrimitiveCulling == rhs.rayTracingPrimitiveCulling ); + } + + bool operator!=( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingFeaturesKHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracing = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling = {}; + + }; + static_assert( sizeof( PhysicalDeviceRayTracingFeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingFeaturesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingFeaturesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct PhysicalDeviceRayTracingPropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesKHR(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxPrimitiveCount_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, uint32_t shaderGroupHandleCaptureReplaySize_ = {}) VULKAN_HPP_NOEXCEPT + : shaderGroupHandleSize( shaderGroupHandleSize_ ), maxRecursionDepth( maxRecursionDepth_ ), maxShaderGroupStride( maxShaderGroupStride_ ), shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ), maxGeometryCount( maxGeometryCount_ ), maxInstanceCount( maxInstanceCount_ ), maxPrimitiveCount( maxPrimitiveCount_ ), maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ), shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesKHR( PhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPropertiesKHR( VkPhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceRayTracingPropertiesKHR & operator=( VkPhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceRayTracingPropertiesKHR & operator=( PhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( 
PhysicalDeviceRayTracingPropertiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceRayTracingPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceRayTracingPropertiesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
+          && ( maxRecursionDepth == rhs.maxRecursionDepth )
+          && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
+          && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
+          && ( maxGeometryCount == rhs.maxGeometryCount )
+          && ( maxInstanceCount == rhs.maxInstanceCount )
+          && ( maxPrimitiveCount == rhs.maxPrimitiveCount )
+          && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures )
+          && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize );
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesKHR;
+    void* pNext = {};
+    uint32_t shaderGroupHandleSize = {};
+    uint32_t maxRecursionDepth = {};
+    uint32_t maxShaderGroupStride = {};
+    uint32_t shaderGroupBaseAlignment = {};
+    uint64_t maxGeometryCount = {};
+    uint64_t maxInstanceCount = {};
+    uint64_t maxPrimitiveCount = {};
+    uint32_t maxDescriptorSetAccelerationStructures = {};
+    uint32_t shaderGroupHandleCaptureReplaySize = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceRayTracingPropertiesKHR ) == sizeof( VkPhysicalDeviceRayTracingPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesKHR>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPropertiesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct PhysicalDeviceRayTracingPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxTriangleCount_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}) VULKAN_HPP_NOEXCEPT + : shaderGroupHandleSize( shaderGroupHandleSize_ ), maxRecursionDepth( maxRecursionDepth_ ), maxShaderGroupStride( maxShaderGroupStride_ ), shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ), maxGeometryCount( maxGeometryCount_ ), maxInstanceCount( maxInstanceCount_ ), maxTriangleCount( maxTriangleCount_ ), maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceRayTracingPropertiesNV& operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceRayTracingPropertiesNV ) ); + return *this; + } + + operator VkPhysicalDeviceRayTracingPropertiesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47313,6 +67554,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const& ) const = default; +#else bool operator==( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -47331,6 +67576,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; @@ -47343,30 +67591,44 @@ namespace VULKAN_HPP_NAMESPACE uint64_t maxInstanceCount = {}; uint64_t maxTriangleCount = {}; uint32_t maxDescriptorSetAccelerationStructures = {}; + }; static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPropertiesNV; + }; + struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT - : representativeFragmentTest( representativeFragmentTest_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}) VULKAN_HPP_NOEXCEPT + : representativeFragmentTest( representativeFragmentTest_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) - offsetof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) ); return *this; } @@ -47382,6 +67644,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47392,6 +67655,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& ) const = default; +#else bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -47403,48 +67670,236 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {}; + }; static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct PhysicalDeviceSampleLocationsPropertiesEXT + template <> + struct CppType { - PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, - std::array const& sampleLocationCoordinateRange_ = {}, - uint32_t sampleLocationSubPixelBits_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT - : sampleLocationSampleCounts( sampleLocationSampleCounts_ ) - , maxSampleLocationGridSize( maxSampleLocationGridSize_ ) - , sampleLocationCoordinateRange{} - , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ) - , variableSampleLocations( variableSampleLocations_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( sampleLocationCoordinateRange, sampleLocationCoordinateRange_ ); - } + using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + }; - VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + struct PhysicalDeviceRobustness2FeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}) VULKAN_HPP_NOEXCEPT + : robustBufferAccess2( robustBufferAccess2_ ), robustImageAccess2( robustImageAccess2_ ), nullDescriptor( nullDescriptor_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT ) - offsetof( PhysicalDeviceSampleLocationsPropertiesEXT, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceRobustness2FeaturesEXT & operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceRobustness2FeaturesEXT ) ); + return *this; + } + + PhysicalDeviceRobustness2FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceRobustness2FeaturesEXT & setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT + { + robustBufferAccess2 = robustBufferAccess2_; + return *this; + } + + PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess2 = robustImageAccess2_; + return *this; + } + + PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + nullDescriptor = nullDescriptor_; + return *this; + } + + + operator VkPhysicalDeviceRobustness2FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + 
+    operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceRobustness2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( robustBufferAccess2 == rhs.robustBufferAccess2 )
+          && ( robustImageAccess2 == rhs.robustImageAccess2 )
+          && ( nullDescriptor == rhs.nullDescriptor );
+    }
+
+    bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRobustness2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesEXT>
+  {
+    using Type = PhysicalDeviceRobustness2FeaturesEXT;
+  };
+
+  struct PhysicalDeviceRobustness2PropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+    : robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ ), robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceRobustness2PropertiesEXT & operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( PhysicalDeviceRobustness2PropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceRobustness2PropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceRobustness2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment )
+          && (
robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment ); + } + + bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {}; + + }; + static_assert( sizeof( PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRobustness2PropertiesEXT; + }; + + struct PhysicalDeviceSampleLocationsPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, std::array const& sampleLocationCoordinateRange_ = {}, uint32_t sampleLocationSubPixelBits_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT + : sampleLocationSampleCounts( sampleLocationSampleCounts_ ), maxSampleLocationGridSize( maxSampleLocationGridSize_ ), sampleLocationCoordinateRange( sampleLocationCoordinateRange_ ), sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ), variableSampleLocations( variableSampleLocations_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceSampleLocationsPropertiesEXT& operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47455,13 +67910,17 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) - && ( memcmp( sampleLocationCoordinateRange, 
rhs.sampleLocationCoordinateRange, 2 * sizeof( float ) ) == 0 ) + && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) && ( variableSampleLocations == rhs.variableSampleLocations ); } @@ -47470,44 +67929,60 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {}; VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; - float sampleLocationCoordinateRange[2] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D sampleLocationCoordinateRange = {}; uint32_t sampleLocationSubPixelBits = {}; VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {}; + }; static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceSampleLocationsPropertiesEXT; + }; + struct PhysicalDeviceSamplerFilterMinmaxProperties { - PhysicalDeviceSamplerFilterMinmaxProperties( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {} ) VULKAN_HPP_NOEXCEPT - : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) - , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}) VULKAN_HPP_NOEXCEPT + : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ), filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties ) - offsetof( PhysicalDeviceSamplerFilterMinmaxProperties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceSamplerFilterMinmaxProperties& operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( 
PhysicalDeviceSamplerFilterMinmaxProperties ) ); + return *this; + } + + operator VkPhysicalDeviceSamplerFilterMinmaxProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47518,6 +67993,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const& ) const = default; +#else bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -47530,36 +68009,54 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + }; static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceSamplerFilterMinmaxProperties; + }; + using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties; + struct PhysicalDeviceSamplerYcbcrConversionFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT - : samplerYcbcrConversion( samplerYcbcrConversion_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}) VULKAN_HPP_NOEXCEPT + : samplerYcbcrConversion( samplerYcbcrConversion_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ) - offsetof( PhysicalDeviceSamplerYcbcrConversionFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) ); return *this; } @@ -47575,6 +68072,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; 
} + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47585,6 +68083,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -47596,35 +68098,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + }; static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceSamplerYcbcrConversionFeatures; + }; + using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; + struct PhysicalDeviceScalarBlockLayoutFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} ) VULKAN_HPP_NOEXCEPT - : scalarBlockLayout( scalarBlockLayout_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}) VULKAN_HPP_NOEXCEPT + : scalarBlockLayout( scalarBlockLayout_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures ) - offsetof( PhysicalDeviceScalarBlockLayoutFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceScalarBlockLayoutFeatures& operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) ); return *this; } @@ -47640,6 +68160,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceScalarBlockLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47650,6 +68171,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -47661,35 +68186,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + }; static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceScalarBlockLayoutFeatures; + }; + using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; + struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {} ) VULKAN_HPP_NOEXCEPT - : separateDepthStencilLayouts( separateDepthStencilLayouts_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}) VULKAN_HPP_NOEXCEPT + : separateDepthStencilLayouts( separateDepthStencilLayouts_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) - offsetof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceSeparateDepthStencilLayoutsFeatures& operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) ); return *this; } @@ -47705,6 +68248,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47715,6 +68259,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto 
operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -47726,37 +68274,228 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + }; static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct PhysicalDeviceShaderAtomicInt64Features + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) - , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) + using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + }; + using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + + struct PhysicalDeviceShaderAtomicFloatFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {}) VULKAN_HPP_NOEXCEPT + : shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ ), shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ ), shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ ), shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ ), shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ ), shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ ), shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ ), shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ ), shaderImageFloat32Atomics( shaderImageFloat32Atomics_ ), shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ ), sparseImageFloat32Atomics( sparseImageFloat32Atomics_ ), sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( 
PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features ) - offsetof( PhysicalDeviceShaderAtomicInt64Features, pNext ) ); + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderAtomicFloatFeaturesEXT ) ); + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32Atomics = shaderImageFloat32Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & 
setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32Atomics = sparseImageFloat32Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_; + return *this; + } + + + operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) + && ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) + && ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) + && ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) + && ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) + && ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) + && ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) + && ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) + && ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) + && ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) + && ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) + && ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd ); + } + + bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {}; + + }; + static_assert( sizeof( PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT; + }; + + struct PhysicalDeviceShaderAtomicInt64Features + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}) VULKAN_HPP_NOEXCEPT + : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ), shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShaderAtomicInt64Features& operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderAtomicInt64Features ) ); return *this; } @@ -47778,6 +68517,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceShaderAtomicInt64Features const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47788,6 +68528,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const& ) const = default; +#else bool operator==( PhysicalDeviceShaderAtomicInt64Features const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -47800,38 +68544,54 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; + }; static_assert( sizeof( PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicInt64Features; + }; + using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features; + struct PhysicalDeviceShaderClockFeaturesKHR { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderSubgroupClock( shaderSubgroupClock_ ) - , shaderDeviceClock( shaderDeviceClock_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {}) VULKAN_HPP_NOEXCEPT + : shaderSubgroupClock( shaderSubgroupClock_ ), shaderDeviceClock( shaderDeviceClock_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR ) - offsetof( PhysicalDeviceShaderClockFeaturesKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShaderClockFeaturesKHR& operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderClockFeaturesKHR ) ); return *this; } @@ -47853,6 +68613,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceShaderClockFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47863,6 +68624,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const& ) const = default; +#else bool operator==( PhysicalDeviceShaderClockFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -47875,41 +68640,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {}; + }; static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceShaderClockFeaturesKHR; + }; + struct PhysicalDeviceShaderCoreProperties2AMD { - PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, - uint32_t activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderCoreFeatures( shaderCoreFeatures_ ) - , activeComputeUnitCount( activeComputeUnitCount_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD(VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, uint32_t activeComputeUnitCount_ = {}) VULKAN_HPP_NOEXCEPT + : shaderCoreFeatures( shaderCoreFeatures_ ), activeComputeUnitCount( activeComputeUnitCount_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) - offsetof( PhysicalDeviceShaderCoreProperties2AMD, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShaderCoreProperties2AMD& operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderCoreProperties2AMD ) ); + return *this; + } + + operator VkPhysicalDeviceShaderCoreProperties2AMD const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -47920,6 +68701,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const& ) const = default; +#else bool operator==( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -47932,65 +68717,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; void* pNext = {}; VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {}; uint32_t activeComputeUnitCount = {}; + }; static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCoreProperties2AMD; + }; + struct PhysicalDeviceShaderCorePropertiesAMD { - PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {}, - uint32_t shaderArraysPerEngineCount_ = {}, - uint32_t computeUnitsPerShaderArray_ = {}, - uint32_t simdPerComputeUnit_ = {}, - uint32_t wavefrontsPerSimd_ = {}, - uint32_t wavefrontSize_ = {}, - uint32_t sgprsPerSimd_ = {}, - uint32_t minSgprAllocation_ = {}, - uint32_t maxSgprAllocation_ = {}, - uint32_t sgprAllocationGranularity_ = {}, - uint32_t vgprsPerSimd_ = {}, - uint32_t minVgprAllocation_ = {}, - uint32_t maxVgprAllocation_ = {}, - uint32_t vgprAllocationGranularity_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderEngineCount( shaderEngineCount_ ) - , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ) - , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ) - , simdPerComputeUnit( simdPerComputeUnit_ ) - , wavefrontsPerSimd( wavefrontsPerSimd_ ) - , wavefrontSize( wavefrontSize_ ) - , sgprsPerSimd( sgprsPerSimd_ ) - , minSgprAllocation( minSgprAllocation_ ) - , maxSgprAllocation( maxSgprAllocation_ ) - , sgprAllocationGranularity( sgprAllocationGranularity_ ) - , vgprsPerSimd( vgprsPerSimd_ ) - , minVgprAllocation( minVgprAllocation_ ) - , maxVgprAllocation( maxVgprAllocation_ ) - , vgprAllocationGranularity( vgprAllocationGranularity_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD(uint32_t shaderEngineCount_ = {}, uint32_t shaderArraysPerEngineCount_ = {}, uint32_t computeUnitsPerShaderArray_ = {}, uint32_t simdPerComputeUnit_ = {}, uint32_t wavefrontsPerSimd_ = {}, uint32_t wavefrontSize_ = {}, uint32_t sgprsPerSimd_ = {}, uint32_t minSgprAllocation_ = {}, uint32_t maxSgprAllocation_ = {}, uint32_t sgprAllocationGranularity_ = {}, uint32_t vgprsPerSimd_ = {}, uint32_t minVgprAllocation_ = {}, uint32_t maxVgprAllocation_ = {}, uint32_t vgprAllocationGranularity_ = {}) VULKAN_HPP_NOEXCEPT + : shaderEngineCount( shaderEngineCount_ ), shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ), computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ), simdPerComputeUnit( simdPerComputeUnit_ ), wavefrontsPerSimd( wavefrontsPerSimd_ ), wavefrontSize( wavefrontSize_ ), sgprsPerSimd( sgprsPerSimd_ ), minSgprAllocation( minSgprAllocation_ ), maxSgprAllocation( maxSgprAllocation_ ), sgprAllocationGranularity( sgprAllocationGranularity_ ), vgprsPerSimd( vgprsPerSimd_ ), minVgprAllocation( minVgprAllocation_ ), maxVgprAllocation( maxVgprAllocation_ ), vgprAllocationGranularity( vgprAllocationGranularity_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) - offsetof( PhysicalDeviceShaderCorePropertiesAMD, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShaderCorePropertiesAMD& operator=( 
VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderCorePropertiesAMD ) ); + return *this; + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48001,6 +68778,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const& ) const = default; +#else bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48025,6 +68806,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; @@ -48043,30 +68827,44 @@ namespace VULKAN_HPP_NAMESPACE uint32_t minVgprAllocation = {}; uint32_t maxVgprAllocation = {}; uint32_t vgprAllocationGranularity = {}; + }; static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCorePropertiesAMD; + }; + struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}) VULKAN_HPP_NOEXCEPT + : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) - offsetof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + 
PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) ); return *this; } @@ -48082,6 +68880,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48092,6 +68891,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48103,35 +68906,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; + }; static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + }; + struct PhysicalDeviceShaderDrawParametersFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderDrawParameters( shaderDrawParameters_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}) VULKAN_HPP_NOEXCEPT + : shaderDrawParameters( shaderDrawParameters_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures ) - offsetof( PhysicalDeviceShaderDrawParametersFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShaderDrawParametersFeatures& operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this 
= *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderDrawParametersFeatures ) ); return *this; } @@ -48147,6 +68967,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceShaderDrawParametersFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48157,6 +68978,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48168,37 +68993,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + }; static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDrawParametersFeatures; + }; + using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; + struct PhysicalDeviceShaderFloat16Int8Features { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderFloat16( shaderFloat16_ ) - , shaderInt8( shaderInt8_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}) VULKAN_HPP_NOEXCEPT + : shaderFloat16( shaderFloat16_ ), shaderInt8( shaderInt8_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features ) - offsetof( PhysicalDeviceShaderFloat16Int8Features, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShaderFloat16Int8Features& operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderFloat16Int8Features & operator=( 
PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderFloat16Int8Features ) ); return *this; } @@ -48220,6 +69061,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceShaderFloat16Int8Features const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48230,6 +69072,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const& ) const = default; +#else bool operator==( PhysicalDeviceShaderFloat16Int8Features const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48242,36 +69088,55 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + }; static_assert( sizeof( PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceShaderFloat16Int8Features; + }; + using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + struct PhysicalDeviceShaderImageFootprintFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT - : imageFootprint( imageFootprint_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {}) VULKAN_HPP_NOEXCEPT + : imageFootprint( imageFootprint_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV ) - offsetof( PhysicalDeviceShaderImageFootprintFeaturesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShaderImageFootprintFeaturesNV& operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, 
sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) ); return *this; } @@ -48287,6 +69152,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48297,6 +69163,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const& ) const = default; +#else bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48308,35 +69178,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {}; + }; static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceShaderImageFootprintFeaturesNV; + }; + struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderIntegerFunctions2( shaderIntegerFunctions2_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {}) VULKAN_HPP_NOEXCEPT + : shaderIntegerFunctions2( shaderIntegerFunctions2_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) - offsetof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) ); return *this; } @@ -48352,6 +69239,7 @@ namespace VULKAN_HPP_NAMESPACE 
return *this; } + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48362,6 +69250,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& ) const = default; +#else bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48373,35 +69265,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {}; + }; static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + }; + struct PhysicalDeviceShaderSMBuiltinsFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderSMBuiltins( shaderSMBuiltins_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {}) VULKAN_HPP_NOEXCEPT + : shaderSMBuiltins( shaderSMBuiltins_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsFeaturesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShaderSMBuiltinsFeaturesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) ); return *this; } @@ -48417,6 +69326,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48427,6 +69337,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( 
PhysicalDeviceShaderSMBuiltinsFeaturesNV const& ) const = default; +#else bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48438,40 +69352,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {}; + }; static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV; + }; + struct PhysicalDeviceShaderSMBuiltinsPropertiesNV { - PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, - uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderSMCount( shaderSMCount_ ) - , shaderWarpsPerSM( shaderWarpsPerSM_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV(uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}) VULKAN_HPP_NOEXCEPT + : shaderSMCount( shaderSMCount_ ), shaderWarpsPerSM( shaderWarpsPerSM_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsPropertiesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShaderSMBuiltinsPropertiesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) ); + return *this; + } + + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48482,6 +69412,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& ) const = default; +#else bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48494,36 +69428,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; void* pNext = {}; uint32_t shaderSMCount = {}; uint32_t shaderWarpsPerSM = {}; + }; static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV; + }; + struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}) VULKAN_HPP_NOEXCEPT + : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) - offsetof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShaderSubgroupExtendedTypesFeatures& operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) ); return *this; } @@ -48539,6 +69490,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48549,6 +69501,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48560,37 +69516,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; 
void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; + }; static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + }; + using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + struct PhysicalDeviceShadingRateImageFeaturesNV { - VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT - : shadingRateImage( shadingRateImage_ ) - , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {}) VULKAN_HPP_NOEXCEPT + : shadingRateImage( shadingRateImage_ ), shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV ) - offsetof( PhysicalDeviceShadingRateImageFeaturesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShadingRateImageFeaturesNV& operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) ); return *this; } @@ -48612,6 +69584,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceShadingRateImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48622,6 +69595,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const& ) const = default; +#else bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48634,43 +69611,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {}; VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {}; + }; static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceShadingRateImageFeaturesNV; + }; + struct PhysicalDeviceShadingRateImagePropertiesNV { - PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, - uint32_t shadingRatePaletteSize_ = {}, - uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT - : shadingRateTexelSize( shadingRateTexelSize_ ) - , shadingRatePaletteSize( shadingRatePaletteSize_ ) - , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV(VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, uint32_t shadingRatePaletteSize_ = {}, uint32_t shadingRateMaxCoarseSamples_ = {}) VULKAN_HPP_NOEXCEPT + : shadingRateTexelSize( shadingRateTexelSize_ ), shadingRatePaletteSize( shadingRatePaletteSize_ ), shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV ) - offsetof( PhysicalDeviceShadingRateImagePropertiesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceShadingRateImagePropertiesNV& operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) ); + return *this; + } + + operator VkPhysicalDeviceShadingRateImagePropertiesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48681,6 +69672,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const& ) const = default; +#else bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48694,6 +69689,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: 
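// A sketch of reading the VK_NV_shading_rate_image limits defined above, assuming the extension is
// supported by physicalDevice and the same include and handle assumptions as the earlier sketch;
// the helper name is illustrative only.
vk::Extent2D queryShadingRateTexelSize( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceShadingRateImagePropertiesNV shadingRateProps;
  vk::PhysicalDeviceProperties2 props2;
  props2.pNext = &shadingRateProps;
  physicalDevice.getProperties2( &props2 );
  return shadingRateProps.shadingRateTexelSize;  // granularity of one shading-rate image texel
}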
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; @@ -48701,144 +69699,48 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {}; uint32_t shadingRatePaletteSize = {}; uint32_t shadingRateMaxCoarseSamples = {}; + }; static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct PhysicalDeviceSparseImageFormatInfo2 + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT - : format( format_ ) - , type( type_ ) - , samples( samples_ ) - , usage( usage_ ) - , tiling( tiling_ ) - {} - - VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 ) - offsetof( PhysicalDeviceSparseImageFormatInfo2, pNext ) ); - return *this; - } - - PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT - { - format = format_; - return *this; - } - - PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT - { - samples = samples_; - return *this; - } - - PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT - { - usage = usage_; - return *this; - } - - PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT - { - tiling = tiling_; - return *this; - } - - operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( type == rhs.type ) - && ( samples == rhs.samples ) - && ( usage == rhs.usage ) - && ( tiling == rhs.tiling ); - } - - bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return 
!operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + using Type = PhysicalDeviceShadingRateImagePropertiesNV; }; - static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PhysicalDeviceSubgroupProperties { - PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, - VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT - : subgroupSize( subgroupSize_ ) - , supportedStages( supportedStages_ ) - , supportedOperations( supportedOperations_ ) - , quadOperationsInAllStages( quadOperationsInAllStages_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties(uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {}) VULKAN_HPP_NOEXCEPT + : subgroupSize( subgroupSize_ ), supportedStages( supportedStages_ ), supportedOperations( supportedOperations_ ), quadOperationsInAllStages( quadOperationsInAllStages_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties ) - offsetof( PhysicalDeviceSubgroupProperties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceSubgroupProperties& operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceSubgroupProperties ) ); + return *this; + } + + operator VkPhysicalDeviceSubgroupProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48849,6 +69751,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto 
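// PhysicalDeviceSubgroupProperties above is a core Vulkan 1.1 properties struct; a hedged example
// of checking subgroup arithmetic support for compute shaders, with the same include and handle
// assumptions as before. The helper name is illustrative only.
bool hasComputeSubgroupArithmetic( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceSubgroupProperties subgroupProps;
  vk::PhysicalDeviceProperties2 props2;
  props2.pNext = &subgroupProps;
  physicalDevice.getProperties2( &props2 );
  return ( subgroupProps.supportedStages & vk::ShaderStageFlagBits::eCompute )
      && ( subgroupProps.supportedOperations & vk::SubgroupFeatureFlagBits::eArithmetic );
}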
operator<=>( PhysicalDeviceSubgroupProperties const& ) const = default; +#else bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48863,6 +69769,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties; @@ -48871,32 +69780,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {}; VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {}; VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {}; + }; static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceSubgroupProperties; + }; + struct PhysicalDeviceSubgroupSizeControlFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT - : subgroupSizeControl( subgroupSizeControl_ ) - , computeFullSubgroups( computeFullSubgroups_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}) VULKAN_HPP_NOEXCEPT + : subgroupSizeControl( subgroupSizeControl_ ), computeFullSubgroups( computeFullSubgroups_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceSubgroupSizeControlFeaturesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) ); return *this; } @@ -48918,6 +69839,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48928,6 +69850,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -48940,45 +69866,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; + }; static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceSubgroupSizeControlFeaturesEXT; + }; + struct PhysicalDeviceSubgroupSizeControlPropertiesEXT { - PhysicalDeviceSubgroupSizeControlPropertiesEXT( uint32_t minSubgroupSize_ = {}, - uint32_t maxSubgroupSize_ = {}, - uint32_t maxComputeWorkgroupSubgroups_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {} ) VULKAN_HPP_NOEXCEPT - : minSubgroupSize( minSubgroupSize_ ) - , maxSubgroupSize( maxSubgroupSize_ ) - , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ) - , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT(uint32_t minSubgroupSize_ = {}, uint32_t maxSubgroupSize_ = {}, uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}) VULKAN_HPP_NOEXCEPT + : minSubgroupSize( minSubgroupSize_ ), maxSubgroupSize( maxSubgroupSize_ ), maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ), requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlPropertiesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceSubgroupSizeControlPropertiesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, 
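// VK_EXT_subgroup_size_control exposes a [minSubgroupSize, maxSubgroupSize] range; a small sketch
// that clamps a requested subgroup size into that range. Assumes the extension is supported and
// that <algorithm> is included alongside <vulkan/vulkan.hpp>; the helper name is illustrative only.
uint32_t clampRequestedSubgroupSize( vk::PhysicalDevice physicalDevice, uint32_t requested )
{
  vk::PhysicalDeviceSubgroupSizeControlPropertiesEXT sizeControl;
  vk::PhysicalDeviceProperties2 props2;
  props2.pNext = &sizeControl;
  physicalDevice.getProperties2( &props2 );
  return std::min( std::max( requested, sizeControl.minSubgroupSize ), sizeControl.maxSubgroupSize );
}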
sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -48989,6 +69927,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -49003,6 +69945,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT; @@ -49011,95 +69956,44 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxSubgroupSize = {}; uint32_t maxComputeWorkgroupSubgroups = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; + }; static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct PhysicalDeviceSurfaceInfo2KHR + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT - : surface( surface_ ) - {} - - VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR ) - offsetof( PhysicalDeviceSurfaceInfo2KHR, pNext ) ); - return *this; - } - - PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceSurfaceInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT - { - surface = surface_; - return *this; - } - - operator VkPhysicalDeviceSurfaceInfo2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( surface == rhs.surface ); - } - - bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + using Type = PhysicalDeviceSubgroupSizeControlPropertiesEXT; }; - static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT - : texelBufferAlignment( texelBufferAlignment_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}) VULKAN_HPP_NOEXCEPT + : texelBufferAlignment( texelBufferAlignment_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceTexelBufferAlignmentFeaturesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) ); return *this; } @@ -49115,6 +70009,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -49125,6 +70020,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -49136,44 +70035,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {}; + }; static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT; + }; + struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT { - PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT - : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ) - , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ) - , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ) - , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}) VULKAN_HPP_NOEXCEPT + : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ), storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ), uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ), uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceTexelBufferAlignmentPropertiesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -49184,6 +70095,10 @@ namespace VULKAN_HPP_NAMESPACE return 
*reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -49198,6 +70113,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT; @@ -49206,30 +70124,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; + }; static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceTexelBufferAlignmentPropertiesEXT; + }; + struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT - : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}) VULKAN_HPP_NOEXCEPT + : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) - offsetof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) ); return *this; } @@ -49245,6 +70177,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT 
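// VK_EXT_texel_buffer_alignment reports per-device offset alignments; a sketch that rounds an
// offset up to the storage texel buffer alignment, assuming the reported value is a power of two
// (the usual case for Vulkan alignments). The helper name is illustrative only.
vk::DeviceSize alignStorageTexelBufferOffset( vk::PhysicalDevice physicalDevice, vk::DeviceSize offset )
{
  vk::PhysicalDeviceTexelBufferAlignmentPropertiesEXT alignmentProps;
  vk::PhysicalDeviceProperties2 props2;
  props2.pNext = &alignmentProps;
  physicalDevice.getProperties2( &props2 );
  vk::DeviceSize a = alignmentProps.storageTexelBufferOffsetAlignmentBytes;
  return ( offset + a - 1 ) & ~( a - 1 );  // round up to the next multiple of a
}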
const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -49255,6 +70188,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -49266,35 +70203,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {}; + }; static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; + }; + struct PhysicalDeviceTimelineSemaphoreFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {} ) VULKAN_HPP_NOEXCEPT - : timelineSemaphore( timelineSemaphore_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures(VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}) VULKAN_HPP_NOEXCEPT + : timelineSemaphore( timelineSemaphore_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures ) - offsetof( PhysicalDeviceTimelineSemaphoreFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceTimelineSemaphoreFeatures& operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) ); return *this; } @@ -49310,6 +70264,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceTimelineSemaphoreFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -49320,6 +70275,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const& ) const = default; +#else bool operator==( 
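// A feature check for VK_EXT_texture_compression_astc_hdr, matching the struct defined above;
// same include and handle assumptions as the earlier sketches, helper name illustrative only.
bool supportsAstcHdr( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT astcHdr;
  vk::PhysicalDeviceFeatures2 features2;
  features2.pNext = &astcHdr;
  physicalDevice.getFeatures2( &features2 );
  return astcHdr.textureCompressionASTC_HDR == VK_TRUE;
}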
PhysicalDeviceTimelineSemaphoreFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -49331,38 +70290,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; + }; static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceTimelineSemaphoreFeatures; + }; + using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures; + struct PhysicalDeviceTimelineSemaphoreProperties { - PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} ) VULKAN_HPP_NOEXCEPT - : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(uint64_t maxTimelineSemaphoreValueDifference_ = {}) VULKAN_HPP_NOEXCEPT + : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties ) - offsetof( PhysicalDeviceTimelineSemaphoreProperties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceTimelineSemaphoreProperties& operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceTimelineSemaphoreProperties ) ); + return *this; + } + + operator VkPhysicalDeviceTimelineSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -49373,6 +70351,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const& ) const = default; +#else bool operator==( PhysicalDeviceTimelineSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -49384,111 +70366,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceTimelineSemaphoreProperties; void* pNext = {}; uint64_t maxTimelineSemaphoreValueDifference = {}; + }; static_assert( sizeof( PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct PhysicalDeviceToolPropertiesEXT + template <> + struct CppType { - PhysicalDeviceToolPropertiesEXT( std::array const& name_ = {}, - std::array const& version_ = {}, - VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = {}, - std::array const& description_ = {}, - std::array const& layer_ = {} ) VULKAN_HPP_NOEXCEPT - : name{} - , version{} - , purposes( purposes_ ) - , description{} - , layer{} - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( name, name_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( version, version_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( description, description_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( layer, layer_ ); - } - - VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT ) - offsetof( PhysicalDeviceToolPropertiesEXT, pNext ) ); - return *this; - } - - PhysicalDeviceToolPropertiesEXT( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceToolPropertiesEXT& operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkPhysicalDeviceToolPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memcmp( name, rhs.name, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 ) - && ( memcmp( version, rhs.version, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 ) - && ( purposes == rhs.purposes ) - && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 ) - && ( memcmp( layer, rhs.layer, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 ); - } - - bool operator!=( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolPropertiesEXT; - void* pNext = {}; - char name[VK_MAX_EXTENSION_NAME_SIZE] = {}; - char version[VK_MAX_EXTENSION_NAME_SIZE] = {}; - VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes = {}; - char description[VK_MAX_DESCRIPTION_SIZE] = {}; - char layer[VK_MAX_EXTENSION_NAME_SIZE] = {}; + using Type = PhysicalDeviceTimelineSemaphoreProperties; }; - static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
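// Once timelineSemaphore is reported (and enabled at device creation), a timeline semaphore is
// created by chaining vk::SemaphoreTypeCreateInfo; maxTimelineSemaphoreValueDifference above bounds
// how far signal values may run ahead of the current counter. A sketch assuming a valid vk::Device
// named device and that unique handles are enabled; the helper name is illustrative only.
vk::UniqueSemaphore createTimelineSemaphore( vk::Device device, uint64_t initialValue )
{
  vk::SemaphoreTypeCreateInfo typeInfo( vk::SemaphoreType::eTimeline, initialValue );
  vk::SemaphoreCreateInfo createInfo;
  createInfo.pNext = &typeInfo;  // chain the timeline type info into the create info
  return device.createSemaphoreUnique( createInfo );
}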
); + using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; struct PhysicalDeviceTransformFeedbackFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {} ) VULKAN_HPP_NOEXCEPT - : transformFeedback( transformFeedback_ ) - , geometryStreams( geometryStreams_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}) VULKAN_HPP_NOEXCEPT + : transformFeedback( transformFeedback_ ), geometryStreams( geometryStreams_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT ) - offsetof( PhysicalDeviceTransformFeedbackFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) ); return *this; } @@ -49510,6 +70434,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -49520,6 +70445,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -49532,57 +70461,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; + }; static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceTransformFeedbackFeaturesEXT; + }; + struct PhysicalDeviceTransformFeedbackPropertiesEXT { - PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = {}, - uint32_t maxTransformFeedbackBuffers_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, - uint32_t maxTransformFeedbackStreamDataSize_ = {}, - uint32_t maxTransformFeedbackBufferDataSize_ = {}, - uint32_t maxTransformFeedbackBufferDataStride_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {} ) VULKAN_HPP_NOEXCEPT - : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ) - , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ) - , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ) - , maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ) - , maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ) - , maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ) - , transformFeedbackQueries( transformFeedbackQueries_ ) - , transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ) - , transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ) - , transformFeedbackDraw( transformFeedbackDraw_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(uint32_t maxTransformFeedbackStreams_ = {}, uint32_t maxTransformFeedbackBuffers_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, uint32_t maxTransformFeedbackStreamDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataStride_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}) VULKAN_HPP_NOEXCEPT + : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ), maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ), maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ), maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ), maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ), maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ), transformFeedbackQueries( transformFeedbackQueries_ ), transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ), transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ), transformFeedbackDraw( transformFeedbackDraw_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT ) - offsetof( PhysicalDeviceTransformFeedbackPropertiesEXT, pNext ) ); - return *this; - } + 
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceTransformFeedbackPropertiesEXT& operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -49593,6 +70522,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -49613,6 +70546,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; @@ -49627,30 +70563,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {}; VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; + }; static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
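// VK_EXT_transform_feedback gates both a feature struct and a set of limits; a sketch that checks
// the feature and the query capability together before deciding to enable the extension. Same
// include and handle assumptions as before; the helper name is illustrative only.
bool canUseTransformFeedbackQueries( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceTransformFeedbackFeaturesEXT xfbFeatures;
  vk::PhysicalDeviceFeatures2 features2;
  features2.pNext = &xfbFeatures;
  physicalDevice.getFeatures2( &features2 );

  vk::PhysicalDeviceTransformFeedbackPropertiesEXT xfbProps;
  vk::PhysicalDeviceProperties2 props2;
  props2.pNext = &xfbProps;
  physicalDevice.getProperties2( &props2 );

  return xfbFeatures.transformFeedback && xfbProps.transformFeedbackQueries;
}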
); + template <> + struct CppType + { + using Type = PhysicalDeviceTransformFeedbackPropertiesEXT; + }; + struct PhysicalDeviceUniformBufferStandardLayoutFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT - : uniformBufferStandardLayout( uniformBufferStandardLayout_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}) VULKAN_HPP_NOEXCEPT + : uniformBufferStandardLayout( uniformBufferStandardLayout_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures ) - offsetof( PhysicalDeviceUniformBufferStandardLayoutFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceUniformBufferStandardLayoutFeatures& operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) ); return *this; } @@ -49666,6 +70616,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -49676,6 +70627,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -49687,37 +70642,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + }; static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures; + }; + using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; + struct PhysicalDeviceVariablePointersFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT - : variablePointersStorageBuffer( variablePointersStorageBuffer_ ) - , variablePointers( variablePointers_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures(VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}) VULKAN_HPP_NOEXCEPT + : variablePointersStorageBuffer( variablePointersStorageBuffer_ ), variablePointers( variablePointers_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures ) - offsetof( PhysicalDeviceVariablePointersFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceVariablePointersFeatures& operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceVariablePointersFeatures ) ); return *this; } @@ -49739,6 +70710,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceVariablePointersFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -49749,6 +70721,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceVariablePointersFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -49761,38 +70737,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + }; static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" 
); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceVariablePointersFeatures; + }; + using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT - : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ) - , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}) VULKAN_HPP_NOEXCEPT + : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ), vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) ); return *this; } @@ -49814,6 +70808,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -49824,6 +70819,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == 
rhs.sType ) @@ -49836,39 +70835,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; + }; static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceVertexAttributeDivisorFeaturesEXT; + }; + struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT { - PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT - : maxVertexAttribDivisor( maxVertexAttribDivisor_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT(uint32_t maxVertexAttribDivisor_ = {}) VULKAN_HPP_NOEXCEPT + : maxVertexAttribDivisor( maxVertexAttribDivisor_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) ); + return *this; + } + + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -49879,6 +70896,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -49890,57 +70911,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; void* pNext = {}; uint32_t maxVertexAttribDivisor = {}; + }; static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT; + }; + struct PhysicalDeviceVulkan11Features { - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT - : storageBuffer16BitAccess( storageBuffer16BitAccess_ ) - , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) - , storagePushConstant16( storagePushConstant16_ ) - , storageInputOutput16( storageInputOutput16_ ) - , multiview( multiview_ ) - , multiviewGeometryShader( multiviewGeometryShader_ ) - , multiviewTessellationShader( multiviewTessellationShader_ ) - , variablePointersStorageBuffer( variablePointersStorageBuffer_ ) - , variablePointers( variablePointers_ ) - , protectedMemory( protectedMemory_ ) - , samplerYcbcrConversion( samplerYcbcrConversion_ ) - , shaderDrawParameters( shaderDrawParameters_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}) VULKAN_HPP_NOEXCEPT + : storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ), storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ ), multiview( multiview_ ), multiviewGeometryShader( multiviewGeometryShader_ ), multiviewTessellationShader( multiviewTessellationShader_ ), variablePointersStorageBuffer( variablePointersStorageBuffer_ ), variablePointers( variablePointers_ ), protectedMemory( protectedMemory_ ), samplerYcbcrConversion( samplerYcbcrConversion_ ), 
shaderDrawParameters( shaderDrawParameters_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features ) - offsetof( PhysicalDeviceVulkan11Features, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceVulkan11Features& operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceVulkan11Features ) ); return *this; } @@ -50022,6 +71038,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceVulkan11Features const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -50032,6 +71049,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceVulkan11Features const& ) const = default; +#else bool operator==( PhysicalDeviceVulkan11Features const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -50054,6 +71075,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features; @@ -50070,160 +71094,47 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + }; static_assert( sizeof( PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan11Features; + }; + struct PhysicalDeviceVulkan11Properties { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( std::array const& deviceUUID_ = {}, - std::array const& driverUUID_ = {}, - std::array const& deviceLUID_ = {}, - uint32_t deviceNodeMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, - uint32_t subgroupSize_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, - VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, - VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, - uint32_t maxMultiviewViewCount_ = {}, - uint32_t maxMultiviewInstanceIndex_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, - uint32_t maxPerSetDescriptors_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceUUID{} - , driverUUID{} - , deviceLUID{} - , deviceNodeMask( deviceNodeMask_ ) - , deviceLUIDValid( deviceLUIDValid_ ) - , subgroupSize( subgroupSize_ ) - , subgroupSupportedStages( subgroupSupportedStages_ ) - , subgroupSupportedOperations( subgroupSupportedOperations_ ) - , subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ ) - , pointClippingBehavior( pointClippingBehavior_ ) - , maxMultiviewViewCount( maxMultiviewViewCount_ ) - , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) - , protectedNoFault( protectedNoFault_ ) - , maxPerSetDescriptors( maxPerSetDescriptors_ ) - , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( deviceUUID, deviceUUID_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( driverUUID, driverUUID_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( deviceLUID, deviceLUID_ ); - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties; - VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties ) - offsetof( PhysicalDeviceVulkan11Properties, pNext ) ); - return *this; - } +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(std::array const& deviceUUID_ = {}, std::array const& driverUUID_ = {}, std::array const& deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}) VULKAN_HPP_NOEXCEPT + : deviceUUID( deviceUUID_ ), driverUUID( driverUUID_ ), deviceLUID( deviceLUID_ ), deviceNodeMask( deviceNodeMask_ ), deviceLUIDValid( 
deviceLUIDValid_ ), subgroupSize( subgroupSize_ ), subgroupSupportedStages( subgroupSupportedStages_ ), subgroupSupportedOperations( subgroupSupportedOperations_ ), subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ ), pointClippingBehavior( pointClippingBehavior_ ), maxMultiviewViewCount( maxMultiviewViewCount_ ), maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ), protectedNoFault( protectedNoFault_ ), maxPerSetDescriptors( maxPerSetDescriptors_ ), maxMemoryAllocationSize( maxMemoryAllocationSize_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceVulkan11Properties& operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceVulkan11Properties & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceVulkan11Properties ) ); return *this; } - PhysicalDeviceVulkan11Properties & setDeviceUUID( std::array deviceUUID_ ) VULKAN_HPP_NOEXCEPT - { - memcpy( deviceUUID, deviceUUID_.data(), VK_UUID_SIZE * sizeof( uint8_t ) ); - return *this; - } - - PhysicalDeviceVulkan11Properties & setDriverUUID( std::array driverUUID_ ) VULKAN_HPP_NOEXCEPT - { - memcpy( driverUUID, driverUUID_.data(), VK_UUID_SIZE * sizeof( uint8_t ) ); - return *this; - } - - PhysicalDeviceVulkan11Properties & setDeviceLUID( std::array deviceLUID_ ) VULKAN_HPP_NOEXCEPT - { - memcpy( deviceLUID, deviceLUID_.data(), VK_LUID_SIZE * sizeof( uint8_t ) ); - return *this; - } - - PhysicalDeviceVulkan11Properties & setDeviceNodeMask( uint32_t deviceNodeMask_ ) VULKAN_HPP_NOEXCEPT - { - deviceNodeMask = deviceNodeMask_; - return *this; - } - - PhysicalDeviceVulkan11Properties & setDeviceLUIDValid( VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ ) VULKAN_HPP_NOEXCEPT - { - deviceLUIDValid = deviceLUIDValid_; - return *this; - } - - PhysicalDeviceVulkan11Properties & setSubgroupSize( uint32_t subgroupSize_ ) VULKAN_HPP_NOEXCEPT - { - subgroupSize = subgroupSize_; - return *this; - } - - PhysicalDeviceVulkan11Properties & setSubgroupSupportedStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ ) VULKAN_HPP_NOEXCEPT - { - subgroupSupportedStages = subgroupSupportedStages_; - return *this; - } - - PhysicalDeviceVulkan11Properties & setSubgroupSupportedOperations( VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ ) VULKAN_HPP_NOEXCEPT - { - subgroupSupportedOperations = subgroupSupportedOperations_; - return *this; - } - - PhysicalDeviceVulkan11Properties & setSubgroupQuadOperationsInAllStages( VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ ) VULKAN_HPP_NOEXCEPT - { - subgroupQuadOperationsInAllStages = subgroupQuadOperationsInAllStages_; - return *this; - } - - PhysicalDeviceVulkan11Properties & setPointClippingBehavior( VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ ) VULKAN_HPP_NOEXCEPT - { - pointClippingBehavior = pointClippingBehavior_; 
- return *this; - } - - PhysicalDeviceVulkan11Properties & setMaxMultiviewViewCount( uint32_t maxMultiviewViewCount_ ) VULKAN_HPP_NOEXCEPT - { - maxMultiviewViewCount = maxMultiviewViewCount_; - return *this; - } - - PhysicalDeviceVulkan11Properties & setMaxMultiviewInstanceIndex( uint32_t maxMultiviewInstanceIndex_ ) VULKAN_HPP_NOEXCEPT - { - maxMultiviewInstanceIndex = maxMultiviewInstanceIndex_; - return *this; - } - - PhysicalDeviceVulkan11Properties & setProtectedNoFault( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ ) VULKAN_HPP_NOEXCEPT - { - protectedNoFault = protectedNoFault_; - return *this; - } - - PhysicalDeviceVulkan11Properties & setMaxPerSetDescriptors( uint32_t maxPerSetDescriptors_ ) VULKAN_HPP_NOEXCEPT - { - maxPerSetDescriptors = maxPerSetDescriptors_; - return *this; - } - - PhysicalDeviceVulkan11Properties & setMaxMemoryAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ ) VULKAN_HPP_NOEXCEPT - { - maxMemoryAllocationSize = maxMemoryAllocationSize_; - return *this; - } operator VkPhysicalDeviceVulkan11Properties const&() const VULKAN_HPP_NOEXCEPT { @@ -50235,13 +71146,17 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceVulkan11Properties const& ) const = default; +#else bool operator==( PhysicalDeviceVulkan11Properties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) - && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) - && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 ) + && ( deviceUUID == rhs.deviceUUID ) + && ( driverUUID == rhs.driverUUID ) + && ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) && ( subgroupSize == rhs.subgroupSize ) @@ -50260,13 +71175,16 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties; void* pNext = {}; - uint8_t deviceUUID[VK_UUID_SIZE] = {}; - uint8_t driverUUID[VK_UUID_SIZE] = {}; - uint8_t deviceLUID[VK_LUID_SIZE] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; uint32_t deviceNodeMask = {}; VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; uint32_t subgroupSize = {}; @@ -50279,122 +71197,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; uint32_t maxPerSetDescriptors = {}; VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; + }; static_assert( sizeof( PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan11Properties; + }; + struct PhysicalDeviceVulkan12Features { - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {} ) VULKAN_HPP_NOEXCEPT - : samplerMirrorClampToEdge( samplerMirrorClampToEdge_ 
) - , drawIndirectCount( drawIndirectCount_ ) - , storageBuffer8BitAccess( storageBuffer8BitAccess_ ) - , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ) - , storagePushConstant8( storagePushConstant8_ ) - , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) - , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) - , shaderFloat16( shaderFloat16_ ) - , shaderInt8( shaderInt8_ ) - , descriptorIndexing( descriptorIndexing_ ) - , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) - , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) - , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) - , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) - , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) - , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) - , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) - , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) - , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) - , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) - , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) - , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) - , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) - , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) - , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) - , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) - , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) - , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) - , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) - , runtimeDescriptorArray( runtimeDescriptorArray_ ) - , samplerFilterMinmax( samplerFilterMinmax_ ) - , scalarBlockLayout( scalarBlockLayout_ ) - , imagelessFramebuffer( imagelessFramebuffer_ ) - , uniformBufferStandardLayout( uniformBufferStandardLayout_ ) - , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) - , separateDepthStencilLayouts( separateDepthStencilLayouts_ ) - , hostQueryReset( hostQueryReset_ ) - , timelineSemaphore( timelineSemaphore_ ) - , bufferDeviceAddress( bufferDeviceAddress_ ) - , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) - , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) - , vulkanMemoryModel( vulkanMemoryModel_ ) - , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ) - , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) - , shaderOutputViewportIndex( shaderOutputViewportIndex_ ) - , shaderOutputLayer( shaderOutputLayer_ ) - , subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features(VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {}) VULKAN_HPP_NOEXCEPT + : samplerMirrorClampToEdge( samplerMirrorClampToEdge_ ), drawIndirectCount( drawIndirectCount_ ), storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ), storagePushConstant8( storagePushConstant8_ ), 
shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ), shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ), shaderFloat16( shaderFloat16_ ), shaderInt8( shaderInt8_ ), descriptorIndexing( descriptorIndexing_ ), shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ), shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ), shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ), shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ), shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ), shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ), shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ), shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ), shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ), shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ), descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ), descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ), descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ), descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ), descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ), descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ), descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ), descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ), descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ), runtimeDescriptorArray( runtimeDescriptorArray_ ), samplerFilterMinmax( samplerFilterMinmax_ ), scalarBlockLayout( scalarBlockLayout_ ), imagelessFramebuffer( imagelessFramebuffer_ ), uniformBufferStandardLayout( uniformBufferStandardLayout_ ), shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ), separateDepthStencilLayouts( separateDepthStencilLayouts_ ), hostQueryReset( hostQueryReset_ ), timelineSemaphore( timelineSemaphore_ ), bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ), vulkanMemoryModel( vulkanMemoryModel_ ), vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ), vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ), shaderOutputViewportIndex( shaderOutputViewportIndex_ ), shaderOutputLayer( shaderOutputLayer_ ), subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features ) - offsetof( PhysicalDeviceVulkan12Features, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } 
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceVulkan12Features& operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceVulkan12Features ) ); return *this; } @@ -50686,6 +71526,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceVulkan12Features const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -50696,6 +71537,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceVulkan12Features const& ) const = default; +#else bool operator==( PhysicalDeviceVulkan12Features const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -50753,6 +71598,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features; @@ -50804,455 +71652,47 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {}; VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {}; + }; static_assert( sizeof( PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan12Features; + }; + struct PhysicalDeviceVulkan12Properties { - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, - std::array const& driverName_ = {}, - std::array const& driverInfo_ = {}, - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, - uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, - uint32_t maxPerStageUpdateAfterBindResources_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, - 
VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, - uint64_t maxTimelineSemaphoreValueDifference_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {} ) VULKAN_HPP_NOEXCEPT - : driverID( driverID_ ) - , driverName{} - , driverInfo{} - , conformanceVersion( conformanceVersion_ ) - , denormBehaviorIndependence( denormBehaviorIndependence_ ) - , roundingModeIndependence( roundingModeIndependence_ ) - , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) - , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) - , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) - , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) - , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) - , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) - , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) - , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) - , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) - , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) - , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) - , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) - , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) - , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) - , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) - , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) - , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) - , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) - , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) - , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) - , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) - , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) - , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) - , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) - , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) - , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) - , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) - , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) - , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffers( 
maxDescriptorSetUpdateAfterBindStorageBuffers_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) - , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) - , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) - , supportedDepthResolveModes( supportedDepthResolveModes_ ) - , supportedStencilResolveModes( supportedStencilResolveModes_ ) - , independentResolveNone( independentResolveNone_ ) - , independentResolve( independentResolve_ ) - , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) - , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) - , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) - , framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( driverName, driverName_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( driverInfo, driverInfo_ ); - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties; - VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties ) - offsetof( PhysicalDeviceVulkan12Properties, pNext ) ); - return *this; - } +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array const& driverName_ = {}, std::array const& driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, 
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, uint64_t maxTimelineSemaphoreValueDifference_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}) VULKAN_HPP_NOEXCEPT + : driverID( driverID_ ), driverName( driverName_ ), driverInfo( driverInfo_ ), conformanceVersion( conformanceVersion_ ), denormBehaviorIndependence( denormBehaviorIndependence_ ), roundingModeIndependence( roundingModeIndependence_ ), shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ), shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ), shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ), shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ), shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ), shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ), shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ), shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ), shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ), shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ), shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ), shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ), shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ), shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ), shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ), maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ), shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ), shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ), shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ), 
shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ), shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ), robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ), quadDivergentImplicitLod( quadDivergentImplicitLod_ ), maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ), maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ), maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ), maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ), maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ), maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ), maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ), maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ), maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ), maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ), maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ), supportedDepthResolveModes( supportedDepthResolveModes_ ), supportedStencilResolveModes( supportedStencilResolveModes_ ), independentResolveNone( independentResolveNone_ ), independentResolve( independentResolve_ ), filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ), filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ), maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ), framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceVulkan12Properties& operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceVulkan12Properties & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceVulkan12Properties ) ); return *this; } - PhysicalDeviceVulkan12Properties & setDriverID( VULKAN_HPP_NAMESPACE::DriverId driverID_ ) VULKAN_HPP_NOEXCEPT - { - driverID = driverID_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setDriverName( std::array driverName_ ) 
VULKAN_HPP_NOEXCEPT - { - memcpy( driverName, driverName_.data(), VK_MAX_DRIVER_NAME_SIZE * sizeof( char ) ); - return *this; - } - - PhysicalDeviceVulkan12Properties & setDriverInfo( std::array driverInfo_ ) VULKAN_HPP_NOEXCEPT - { - memcpy( driverInfo, driverInfo_.data(), VK_MAX_DRIVER_INFO_SIZE * sizeof( char ) ); - return *this; - } - - PhysicalDeviceVulkan12Properties & setConformanceVersion( VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ ) VULKAN_HPP_NOEXCEPT - { - conformanceVersion = conformanceVersion_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setDenormBehaviorIndependence( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ ) VULKAN_HPP_NOEXCEPT - { - denormBehaviorIndependence = denormBehaviorIndependence_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setRoundingModeIndependence( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ ) VULKAN_HPP_NOEXCEPT - { - roundingModeIndependence = roundingModeIndependence_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderSignedZeroInfNanPreserveFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ ) VULKAN_HPP_NOEXCEPT - { - shaderSignedZeroInfNanPreserveFloat16 = shaderSignedZeroInfNanPreserveFloat16_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderSignedZeroInfNanPreserveFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ ) VULKAN_HPP_NOEXCEPT - { - shaderSignedZeroInfNanPreserveFloat32 = shaderSignedZeroInfNanPreserveFloat32_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderSignedZeroInfNanPreserveFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ ) VULKAN_HPP_NOEXCEPT - { - shaderSignedZeroInfNanPreserveFloat64 = shaderSignedZeroInfNanPreserveFloat64_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderDenormPreserveFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ ) VULKAN_HPP_NOEXCEPT - { - shaderDenormPreserveFloat16 = shaderDenormPreserveFloat16_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderDenormPreserveFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ ) VULKAN_HPP_NOEXCEPT - { - shaderDenormPreserveFloat32 = shaderDenormPreserveFloat32_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderDenormPreserveFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ ) VULKAN_HPP_NOEXCEPT - { - shaderDenormPreserveFloat64 = shaderDenormPreserveFloat64_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderDenormFlushToZeroFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ ) VULKAN_HPP_NOEXCEPT - { - shaderDenormFlushToZeroFloat16 = shaderDenormFlushToZeroFloat16_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderDenormFlushToZeroFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ ) VULKAN_HPP_NOEXCEPT - { - shaderDenormFlushToZeroFloat32 = shaderDenormFlushToZeroFloat32_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderDenormFlushToZeroFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ ) VULKAN_HPP_NOEXCEPT - { - shaderDenormFlushToZeroFloat64 = shaderDenormFlushToZeroFloat64_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTEFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ ) VULKAN_HPP_NOEXCEPT - { - shaderRoundingModeRTEFloat16 = 
shaderRoundingModeRTEFloat16_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTEFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ ) VULKAN_HPP_NOEXCEPT - { - shaderRoundingModeRTEFloat32 = shaderRoundingModeRTEFloat32_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTEFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ ) VULKAN_HPP_NOEXCEPT - { - shaderRoundingModeRTEFloat64 = shaderRoundingModeRTEFloat64_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTZFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ ) VULKAN_HPP_NOEXCEPT - { - shaderRoundingModeRTZFloat16 = shaderRoundingModeRTZFloat16_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTZFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ ) VULKAN_HPP_NOEXCEPT - { - shaderRoundingModeRTZFloat32 = shaderRoundingModeRTZFloat32_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTZFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ ) VULKAN_HPP_NOEXCEPT - { - shaderRoundingModeRTZFloat64 = shaderRoundingModeRTZFloat64_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxUpdateAfterBindDescriptorsInAllPools( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ ) VULKAN_HPP_NOEXCEPT - { - maxUpdateAfterBindDescriptorsInAllPools = maxUpdateAfterBindDescriptorsInAllPools_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderUniformBufferArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT - { - shaderUniformBufferArrayNonUniformIndexingNative = shaderUniformBufferArrayNonUniformIndexingNative_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderSampledImageArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT - { - shaderSampledImageArrayNonUniformIndexingNative = shaderSampledImageArrayNonUniformIndexingNative_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderStorageBufferArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT - { - shaderStorageBufferArrayNonUniformIndexingNative = shaderStorageBufferArrayNonUniformIndexingNative_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderStorageImageArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT - { - shaderStorageImageArrayNonUniformIndexingNative = shaderStorageImageArrayNonUniformIndexingNative_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setShaderInputAttachmentArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT - { - shaderInputAttachmentArrayNonUniformIndexingNative = shaderInputAttachmentArrayNonUniformIndexingNative_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setRobustBufferAccessUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT - { - robustBufferAccessUpdateAfterBind = robustBufferAccessUpdateAfterBind_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setQuadDivergentImplicitLod( VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ ) VULKAN_HPP_NOEXCEPT - { - 
quadDivergentImplicitLod = quadDivergentImplicitLod_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindSamplers( uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ ) VULKAN_HPP_NOEXCEPT - { - maxPerStageDescriptorUpdateAfterBindSamplers = maxPerStageDescriptorUpdateAfterBindSamplers_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindUniformBuffers( uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) VULKAN_HPP_NOEXCEPT - { - maxPerStageDescriptorUpdateAfterBindUniformBuffers = maxPerStageDescriptorUpdateAfterBindUniformBuffers_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindStorageBuffers( uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) VULKAN_HPP_NOEXCEPT - { - maxPerStageDescriptorUpdateAfterBindStorageBuffers = maxPerStageDescriptorUpdateAfterBindStorageBuffers_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindSampledImages( uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ ) VULKAN_HPP_NOEXCEPT - { - maxPerStageDescriptorUpdateAfterBindSampledImages = maxPerStageDescriptorUpdateAfterBindSampledImages_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindStorageImages( uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ ) VULKAN_HPP_NOEXCEPT - { - maxPerStageDescriptorUpdateAfterBindStorageImages = maxPerStageDescriptorUpdateAfterBindStorageImages_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindInputAttachments( uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) VULKAN_HPP_NOEXCEPT - { - maxPerStageDescriptorUpdateAfterBindInputAttachments = maxPerStageDescriptorUpdateAfterBindInputAttachments_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxPerStageUpdateAfterBindResources( uint32_t maxPerStageUpdateAfterBindResources_ ) VULKAN_HPP_NOEXCEPT - { - maxPerStageUpdateAfterBindResources = maxPerStageUpdateAfterBindResources_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindSamplers( uint32_t maxDescriptorSetUpdateAfterBindSamplers_ ) VULKAN_HPP_NOEXCEPT - { - maxDescriptorSetUpdateAfterBindSamplers = maxDescriptorSetUpdateAfterBindSamplers_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindUniformBuffers( uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ ) VULKAN_HPP_NOEXCEPT - { - maxDescriptorSetUpdateAfterBindUniformBuffers = maxDescriptorSetUpdateAfterBindUniformBuffers_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindUniformBuffersDynamic( uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) VULKAN_HPP_NOEXCEPT - { - maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindStorageBuffers( uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ ) VULKAN_HPP_NOEXCEPT - { - maxDescriptorSetUpdateAfterBindStorageBuffers = maxDescriptorSetUpdateAfterBindStorageBuffers_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindStorageBuffersDynamic( uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) VULKAN_HPP_NOEXCEPT - { - maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = 
maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindSampledImages( uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ ) VULKAN_HPP_NOEXCEPT - { - maxDescriptorSetUpdateAfterBindSampledImages = maxDescriptorSetUpdateAfterBindSampledImages_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindStorageImages( uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ ) VULKAN_HPP_NOEXCEPT - { - maxDescriptorSetUpdateAfterBindStorageImages = maxDescriptorSetUpdateAfterBindStorageImages_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindInputAttachments( uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ ) VULKAN_HPP_NOEXCEPT - { - maxDescriptorSetUpdateAfterBindInputAttachments = maxDescriptorSetUpdateAfterBindInputAttachments_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setSupportedDepthResolveModes( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ ) VULKAN_HPP_NOEXCEPT - { - supportedDepthResolveModes = supportedDepthResolveModes_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setSupportedStencilResolveModes( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ ) VULKAN_HPP_NOEXCEPT - { - supportedStencilResolveModes = supportedStencilResolveModes_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setIndependentResolveNone( VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ ) VULKAN_HPP_NOEXCEPT - { - independentResolveNone = independentResolveNone_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setIndependentResolve( VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ ) VULKAN_HPP_NOEXCEPT - { - independentResolve = independentResolve_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setFilterMinmaxSingleComponentFormats( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ ) VULKAN_HPP_NOEXCEPT - { - filterMinmaxSingleComponentFormats = filterMinmaxSingleComponentFormats_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setFilterMinmaxImageComponentMapping( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ ) VULKAN_HPP_NOEXCEPT - { - filterMinmaxImageComponentMapping = filterMinmaxImageComponentMapping_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setMaxTimelineSemaphoreValueDifference( uint64_t maxTimelineSemaphoreValueDifference_ ) VULKAN_HPP_NOEXCEPT - { - maxTimelineSemaphoreValueDifference = maxTimelineSemaphoreValueDifference_; - return *this; - } - - PhysicalDeviceVulkan12Properties & setFramebufferIntegerColorSampleCounts( VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ ) VULKAN_HPP_NOEXCEPT - { - framebufferIntegerColorSampleCounts = framebufferIntegerColorSampleCounts_; - return *this; - } operator VkPhysicalDeviceVulkan12Properties const&() const VULKAN_HPP_NOEXCEPT { @@ -51264,13 +71704,17 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceVulkan12Properties const& ) const = default; +#else bool operator==( PhysicalDeviceVulkan12Properties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) - && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE * sizeof( char ) ) == 0 ) - && ( memcmp( driverInfo, rhs.driverInfo, 
VK_MAX_DRIVER_INFO_SIZE * sizeof( char ) ) == 0 ) + && ( driverName == rhs.driverName ) + && ( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion ) && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && ( roundingModeIndependence == rhs.roundingModeIndependence ) @@ -51326,13 +71770,16 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties; void* pNext = {}; VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; - char driverName[VK_MAX_DRIVER_NAME_SIZE] = {}; - char driverInfo[VK_MAX_DRIVER_INFO_SIZE] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; @@ -51382,34 +71829,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; uint64_t maxTimelineSemaphoreValueDifference = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {}; + }; static_assert( sizeof( PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan12Properties; + }; + struct PhysicalDeviceVulkanMemoryModelFeatures { - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT - : vulkanMemoryModel( vulkanMemoryModel_ ) - , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ) - , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}) VULKAN_HPP_NOEXCEPT + : vulkanMemoryModel( vulkanMemoryModel_ ), vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ), vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures ) - offsetof( PhysicalDeviceVulkanMemoryModelFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceVulkanMemoryModelFeatures& operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) ); return *this; } @@ -51437,6 +71894,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceVulkanMemoryModelFeatures const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -51447,6 +71905,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const& ) const = default; +#else bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -51460,6 +71922,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; @@ -51467,30 +71932,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; + }; static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PhysicalDeviceVulkanMemoryModelFeatures; + }; + using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; + struct PhysicalDeviceYcbcrImageArraysFeaturesEXT { - VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT - : ycbcrImageArrays( ycbcrImageArrays_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {}) VULKAN_HPP_NOEXCEPT + : ycbcrImageArrays( ycbcrImageArrays_ ) {} - VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT ) - offsetof( PhysicalDeviceYcbcrImageArraysFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) ); return *this; } @@ -51506,6 +71986,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -51516,6 +71997,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& ) const = default; +#else bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -51527,124 +72012,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {}; + }; static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct PipelineCacheCreateInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, - size_t initialDataSize_ = {}, - const void* pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , initialDataSize( initialDataSize_ ) - , pInitialData( pInitialData_ ) - {} - - VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) - offsetof( PipelineCacheCreateInfo, pNext ) ); - return *this; - } - - PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineCacheCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT - { - initialDataSize = initialDataSize_; - return *this; - } - - PipelineCacheCreateInfo & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT - { - pInitialData = pInitialData_; - return *this; - } - - operator VkPipelineCacheCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( initialDataSize == rhs.initialDataSize ) - && ( pInitialData == rhs.pInitialData ); - } - - bool operator!=( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; - size_t initialDataSize = {}; - const void* pInitialData = {}; + using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT; }; - static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PipelineColorBlendAdvancedStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, - VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated ) VULKAN_HPP_NOEXCEPT - : srcPremultiplied( srcPremultiplied_ ) - , dstPremultiplied( dstPremultiplied_ ) - , blendOverlap( blendOverlap_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated) VULKAN_HPP_NOEXCEPT + : srcPremultiplied( srcPremultiplied_ ), dstPremultiplied( dstPremultiplied_ ), blendOverlap( blendOverlap_ ) {} - VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT ) - offsetof( PipelineColorBlendAdvancedStateCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) ); return *this; } @@ -51672,6 +72085,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -51682,6 +72096,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const& ) const = default; +#else bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -51695,6 +72113,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; @@ -51702,30 +72123,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = 
VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; + }; static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineColorBlendAdvancedStateCreateInfoEXT; + }; + struct PipelineCompilerControlCreateInfoAMD { - VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {} ) VULKAN_HPP_NOEXCEPT - : compilerControlFlags( compilerControlFlags_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD(VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {}) VULKAN_HPP_NOEXCEPT + : compilerControlFlags( compilerControlFlags_ ) {} - VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD & operator=( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD ) - offsetof( PipelineCompilerControlCreateInfoAMD, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineCompilerControlCreateInfoAMD& operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineCompilerControlCreateInfoAMD ) ); return *this; } @@ -51741,6 +72176,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineCompilerControlCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -51751,6 +72187,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineCompilerControlCreateInfoAMD const& ) const = default; +#else bool operator==( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -51762,43 +72202,58 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD; const void* pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {}; + }; static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PipelineCompilerControlCreateInfoAMD; + }; + struct PipelineCoverageModulationStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, - VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, - uint32_t coverageModulationTableCount_ = {}, - const float* pCoverageModulationTable_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , coverageModulationMode( coverageModulationMode_ ) - , coverageModulationTableEnable( coverageModulationTableEnable_ ) - , coverageModulationTableCount( coverageModulationTableCount_ ) - , pCoverageModulationTable( pCoverageModulationTable_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, uint32_t coverageModulationTableCount_ = {}, const float* pCoverageModulationTable_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( coverageModulationTableCount_ ), pCoverageModulationTable( pCoverageModulationTable_ ) {} - VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) - offsetof( PipelineCoverageModulationStateCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_ ) + : flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( static_cast( coverageModulationTable_.size() ) ), pCoverageModulationTable( coverageModulationTable_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - 
*this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) ); return *this; } @@ -51838,6 +72293,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableCount = static_cast( coverageModulationTable_.size() ); + pCoverageModulationTable = coverageModulationTable_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPipelineCoverageModulationStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -51848,6 +72313,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const& ) const = default; +#else bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -51863,6 +72332,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; @@ -51872,32 +72344,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {}; uint32_t coverageModulationTableCount = {}; const float* pCoverageModulationTable = {}; + }; static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PipelineCoverageModulationStateCreateInfoNV; + }; + struct PipelineCoverageReductionStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , coverageReductionMode( coverageReductionMode_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), coverageReductionMode( coverageReductionMode_ ) {} - VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV ) - offsetof( PipelineCoverageReductionStateCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineCoverageReductionStateCreateInfoNV& operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineCoverageReductionStateCreateInfoNV ) ); return *this; } @@ -51919,6 +72403,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineCoverageReductionStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -51929,6 +72414,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const& ) const = default; +#else bool operator==( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -51941,40 +72430,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; const void* pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {}; VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + }; static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == sizeof( 
VkPipelineCoverageReductionStateCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineCoverageReductionStateCreateInfoNV; + }; + struct PipelineCoverageToColorStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, - uint32_t coverageToColorLocation_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , coverageToColorEnable( coverageToColorEnable_ ) - , coverageToColorLocation( coverageToColorLocation_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, uint32_t coverageToColorLocation_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), coverageToColorEnable( coverageToColorEnable_ ), coverageToColorLocation( coverageToColorLocation_ ) {} - VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV ) - offsetof( PipelineCoverageToColorStateCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV & operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) ); return *this; } @@ -52002,6 +72504,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineCoverageToColorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -52012,6 +72515,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const& ) const = default; +#else bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -52025,6 +72532,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; @@ -52032,29 +72542,47 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags 
= {}; VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {}; uint32_t coverageToColorLocation = {}; + }; static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineCoverageToColorStateCreateInfoNV; + }; + struct PipelineCreationFeedbackEXT { - PipelineCreationFeedbackEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {}, - uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , duration( duration_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackEXT(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {}, uint64_t duration_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), duration( duration_ ) {} + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackEXT( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineCreationFeedbackEXT( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineCreationFeedbackEXT& operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackEXT & operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PipelineCreationFeedbackEXT & operator=( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineCreationFeedbackEXT ) ); + return *this; + } + + operator VkPipelineCreationFeedbackEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -52065,6 +72593,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineCreationFeedbackEXT const& ) const = default; +#else bool operator==( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( flags == rhs.flags ) @@ -52075,38 +72607,51 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags = {}; uint64_t duration = {}; + }; static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PipelineCreationFeedbackCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = {}, - uint32_t pipelineStageCreationFeedbackCount_ = {}, - VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = {} ) VULKAN_HPP_NOEXCEPT - : pPipelineCreationFeedback( pPipelineCreationFeedback_ ) - , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ) - , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = {}, uint32_t pipelineStageCreationFeedbackCount_ = {}, VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = {}) VULKAN_HPP_NOEXCEPT + : pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ), pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ ) {} - VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT ) - offsetof( PipelineCreationFeedbackCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineCreationFeedbackCreateInfoEXT( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineCreationFeedbackCreateInfoEXT& operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineStageCreationFeedbacks_ ) + : pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( static_cast( pipelineStageCreationFeedbacks_.size() ) ), pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineCreationFeedbackCreateInfoEXT & operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineCreationFeedbackCreateInfoEXT & operator=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineCreationFeedbackCreateInfoEXT ) ); return *this; } @@ -52134,6 +72679,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStageCreationFeedbackCount = static_cast( pipelineStageCreationFeedbacks_.size() ); + 
pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPipelineCreationFeedbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -52144,6 +72699,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineCreationFeedbackCreateInfoEXT const& ) const = default; +#else bool operator==( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -52157,6 +72716,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT; @@ -52164,36 +72726,50 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback = {}; uint32_t pipelineStageCreationFeedbackCount = {}; VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks = {}; + }; static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineCreationFeedbackCreateInfoEXT; + }; + struct PipelineDiscardRectangleStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, - uint32_t discardRectangleCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , discardRectangleMode( discardRectangleMode_ ) - , discardRectangleCount( discardRectangleCount_ ) - , pDiscardRectangles( pDiscardRectangles_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, uint32_t discardRectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( discardRectangleCount_ ), pDiscardRectangles( pDiscardRectangles_ ) {} - VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT ) - offsetof( PipelineDiscardRectangleStateCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - 
PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_ ) + : flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( static_cast( discardRectangles_.size() ) ), pDiscardRectangles( discardRectangles_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) ); return *this; } @@ -52227,6 +72803,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT + { + discardRectangleCount = static_cast( discardRectangles_.size() ); + pDiscardRectangles = discardRectangles_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPipelineDiscardRectangleStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -52237,6 +72823,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const& ) const = default; +#else bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -52251,6 +72841,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; @@ -52259,572 +72852,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive; uint32_t discardRectangleCount = {}; const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles = {}; + }; static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct PipelineExecutableInfoKHR + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, - uint32_t executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : pipeline( pipeline_ ) - , executableIndex( executableIndex_ ) - {} - - VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR ) - offsetof( PipelineExecutableInfoKHR, pNext ) ); - return *this; - } - - PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineExecutableInfoKHR& operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineExecutableInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT - { - pipeline = pipeline_; - return *this; - } - - PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT - { - executableIndex = executableIndex_; - return *this; - } - - operator VkPipelineExecutableInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipeline == rhs.pipeline ) - && ( executableIndex == rhs.executableIndex ); - } - - bool operator!=( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - uint32_t executableIndex = {}; + using Type = PipelineDiscardRectangleStateCreateInfoEXT; }; - static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PipelineExecutableInternalRepresentationKHR - { - PipelineExecutableInternalRepresentationKHR( std::array const& name_ = {}, - std::array const& description_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, - size_t dataSize_ = {}, - void* pData_ = {} ) VULKAN_HPP_NOEXCEPT - : name{} - , description{} - , isText( isText_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( name, name_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( description, description_ ); - } - - VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR ) - offsetof( PipelineExecutableInternalRepresentationKHR, pNext ) ); - return *this; - } - - PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineExecutableInternalRepresentationKHR& operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkPipelineExecutableInternalRepresentationKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memcmp( name, rhs.name, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 ) - && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 ) - && ( isText == rhs.isText ) - && ( dataSize == rhs.dataSize ) - && ( pData == rhs.pData ); - } - - bool operator!=( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR; - void* pNext = {}; - char name[VK_MAX_DESCRIPTION_SIZE] = {}; - char description[VK_MAX_DESCRIPTION_SIZE] = {}; - VULKAN_HPP_NAMESPACE::Bool32 isText = {}; - size_t dataSize = {}; - void* pData = {}; - }; - static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PipelineExecutablePropertiesKHR - { - PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, - std::array const& name_ = {}, - std::array const& description_ = {}, - uint32_t subgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT - : stages( stages_ ) - , name{} - , description{} - , subgroupSize( subgroupSize_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( name, name_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( description, description_ ); - } - - VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR ) - offsetof( PipelineExecutablePropertiesKHR, pNext ) ); - return *this; - } - - PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineExecutablePropertiesKHR& operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkPipelineExecutablePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stages == rhs.stages ) - && ( memcmp( name, rhs.name, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 ) - && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 ) - && ( subgroupSize == rhs.subgroupSize ); - } - - bool operator!=( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {}; - char name[VK_MAX_DESCRIPTION_SIZE] = {}; - char description[VK_MAX_DESCRIPTION_SIZE] = {}; - uint32_t subgroupSize = {}; - }; - static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - union PipelineExecutableStatisticValueKHR - { - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) ); - return *this; - } - - operator VkPipelineExecutableStatisticValueKHR const&() const - { - return *reinterpret_cast(this); - } - - operator VkPipelineExecutableStatisticValueKHR &() - { - return *reinterpret_cast(this); - } - -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - VULKAN_HPP_NAMESPACE::Bool32 b32; - int64_t i64; - uint64_t u64; - double f64; -#else - VkBool32 b32; - int64_t i64; - uint64_t u64; - double f64; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ - }; - - struct PipelineExecutableStatisticKHR - { - PipelineExecutableStatisticKHR( std::array const& name_ = {}, - std::array const& description_ = {}, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {} ) VULKAN_HPP_NOEXCEPT - : name{} - , description{} - , format( format_ ) - , value( value_ ) - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( name, name_ ); - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( description, description_ ); - } - - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR ) - offsetof( PipelineExecutableStatisticKHR, pNext ) ); - return *this; - } - - PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineExecutableStatisticKHR& operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkPipelineExecutableStatisticKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR; - void* pNext = {}; - char name[VK_MAX_DESCRIPTION_SIZE] = {}; - char description[VK_MAX_DESCRIPTION_SIZE] = {}; - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32; - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {}; - }; - static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PipelineInfoKHR - { - VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT - : pipeline( pipeline_ ) - {} - - VULKAN_HPP_NAMESPACE::PipelineInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineInfoKHR ) - offsetof( PipelineInfoKHR, pNext ) ); - return *this; - } - - PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineInfoKHR& operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT - { - pipeline = pipeline_; - return *this; - } - - operator VkPipelineInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipeline == rhs.pipeline ); - } - - bool operator!=( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - }; - static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct PushConstantRange - { - VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, - uint32_t offset_ = {}, - uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT - : stageFlags( stageFlags_ ) - , offset( offset_ ) - , size( size_ ) - {} - - PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PushConstantRange& operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT - { - stageFlags = stageFlags_; - return *this; - } - - PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT - { - offset = offset_; - return *this; - } - - PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT - { - size = size_; - return *this; - } - - operator VkPushConstantRange const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( stageFlags == rhs.stageFlags ) - && ( offset == rhs.offset ) - && ( size == rhs.size ); - } - - bool operator!=( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; - uint32_t offset = {}; - uint32_t size = {}; - }; - static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct PipelineLayoutCreateInfo - { - VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, - uint32_t setLayoutCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}, - uint32_t pushConstantRangeCount_ = {}, - const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , setLayoutCount( setLayoutCount_ ) - , pSetLayouts( pSetLayouts_ ) - , pushConstantRangeCount( pushConstantRangeCount_ ) - , pPushConstantRanges( pPushConstantRanges_ ) - {} - - VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo ) - offsetof( PipelineLayoutCreateInfo, pNext ) ); - return *this; - } - - PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT - { - setLayoutCount = setLayoutCount_; - return *this; - } - - PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT - { - pSetLayouts = pSetLayouts_; - return *this; - } - - PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT - { - pushConstantRangeCount = pushConstantRangeCount_; - return *this; - } - - PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT - { - pPushConstantRanges = pPushConstantRanges_; - return *this; - } - - operator VkPipelineLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( setLayoutCount == rhs.setLayoutCount ) - && ( pSetLayouts == rhs.pSetLayouts ) - && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) - && ( pPushConstantRanges == rhs.pPushConstantRanges ); - } - - bool operator!=( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {}; - uint32_t setLayoutCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {}; - uint32_t pushConstantRangeCount = {}; - const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges = {}; - }; - static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and 
wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PipelineRasterizationConservativeStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, - float extraPrimitiveOverestimationSize_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , conservativeRasterizationMode( conservativeRasterizationMode_ ) - , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, float extraPrimitiveOverestimationSize_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), conservativeRasterizationMode( conservativeRasterizationMode_ ), extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ ) {} - VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT ) - offsetof( PipelineRasterizationConservativeStateCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineRasterizationConservativeStateCreateInfoEXT& operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineRasterizationConservativeStateCreateInfoEXT & operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) ); return *this; } @@ -52852,6 +72917,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineRasterizationConservativeStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -52862,6 +72928,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const& ) const = default; +#else bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const 
VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -52875,6 +72945,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; @@ -52882,32 +72955,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {}; VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled; float extraPrimitiveOverestimationSize = {}; + }; static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineRasterizationConservativeStateCreateInfoEXT; + }; + struct PipelineRasterizationDepthClipStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , depthClipEnable( depthClipEnable_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), depthClipEnable( depthClipEnable_ ) {} - VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT ) - offsetof( PipelineRasterizationDepthClipStateCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineRasterizationDepthClipStateCreateInfoEXT& operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) ); return *this; } @@ -52929,6 +73014,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -52939,6 +73025,10 @@ 
namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const& ) const = default; +#else bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -52951,42 +73041,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; const void* pNext = {}; VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {}; VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + }; static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineRasterizationDepthClipStateCreateInfoEXT; + }; + struct PipelineRasterizationLineStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, - VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, - uint32_t lineStippleFactor_ = {}, - uint16_t lineStipplePattern_ = {} ) VULKAN_HPP_NOEXCEPT - : lineRasterizationMode( lineRasterizationMode_ ) - , stippledLineEnable( stippledLineEnable_ ) - , lineStippleFactor( lineStippleFactor_ ) - , lineStipplePattern( lineStipplePattern_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, uint32_t lineStippleFactor_ = {}, uint16_t lineStipplePattern_ = {}) VULKAN_HPP_NOEXCEPT + : lineRasterizationMode( lineRasterizationMode_ ), stippledLineEnable( stippledLineEnable_ ), lineStippleFactor( lineStippleFactor_ ), lineStipplePattern( lineStipplePattern_ ) {} - VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT ) - offsetof( PipelineRasterizationLineStateCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineRasterizationLineStateCreateInfoEXT& operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); 
+ return *this; + } + + PipelineRasterizationLineStateCreateInfoEXT & operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineRasterizationLineStateCreateInfoEXT ) ); return *this; } @@ -53020,6 +73121,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineRasterizationLineStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -53030,6 +73132,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const& ) const = default; +#else bool operator==( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -53044,6 +73150,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; @@ -53052,30 +73161,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {}; uint32_t lineStippleFactor = {}; uint16_t lineStipplePattern = {}; + }; static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineRasterizationLineStateCreateInfoEXT; + }; + struct PipelineRasterizationStateRasterizationOrderAMD { - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT - : rasterizationOrder( rasterizationOrder_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict) VULKAN_HPP_NOEXCEPT + : rasterizationOrder( rasterizationOrder_ ) {} - VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD ) - offsetof( PipelineRasterizationStateRasterizationOrderAMD, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + 
return *this; + } + + PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) ); return *this; } @@ -53091,6 +73214,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineRasterizationStateRasterizationOrderAMD const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -53101,6 +73225,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const& ) const = default; +#else bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -53112,37 +73240,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; const void* pNext = {}; VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict; + }; static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineRasterizationStateRasterizationOrderAMD; + }; + struct PipelineRasterizationStateStreamCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, - uint32_t rasterizationStream_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , rasterizationStream( rasterizationStream_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, uint32_t rasterizationStream_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), rasterizationStream( rasterizationStream_ ) {} - VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT ) - offsetof( PipelineRasterizationStateStreamCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineRasterizationStateStreamCreateInfoEXT& operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateStreamCreateInfoEXT & operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + + PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) ); return *this; } @@ -53164,6 +73307,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineRasterizationStateStreamCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -53174,6 +73318,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const& ) const = default; +#else bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -53186,36 +73334,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; const void* pNext = {}; VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; uint32_t rasterizationStream = {}; + }; static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineRasterizationStateStreamCreateInfoEXT; + }; + struct PipelineRepresentativeFragmentTestStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {} ) VULKAN_HPP_NOEXCEPT - : representativeFragmentTestEnable( representativeFragmentTestEnable_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}) VULKAN_HPP_NOEXCEPT + : representativeFragmentTestEnable( representativeFragmentTestEnable_ ) {} - VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV ) - offsetof( PipelineRepresentativeFragmentTestStateCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineRepresentativeFragmentTestStateCreateInfoNV& operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + + PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) ); return *this; } @@ -53231,6 +73396,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -53241,6 +73407,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const& ) const = default; +#else bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -53252,37 +73422,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; const void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {}; + }; static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV; + }; + struct PipelineSampleLocationsStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT - : sampleLocationsEnable( sampleLocationsEnable_ ) - , sampleLocationsInfo( sampleLocationsInfo_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT + : sampleLocationsEnable( sampleLocationsEnable_ ), sampleLocationsInfo( sampleLocationsInfo_ ) {} - VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT ) - offsetof( PipelineSampleLocationsStateCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineSampleLocationsStateCreateInfoEXT& operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = 
*reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) ); return *this; } @@ -53298,12 +73483,13 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsInfo = sampleLocationsInfo_; return *this; } + operator VkPipelineSampleLocationsStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -53314,6 +73500,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const& ) const = default; +#else bool operator==( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -53326,39 +73516,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; const void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PipelineSampleLocationsStateCreateInfoEXT; + }; + struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT { - PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( uint32_t requiredSubgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT - : requiredSubgroupSize( requiredSubgroupSize_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(uint32_t requiredSubgroupSize_ = {}) VULKAN_HPP_NOEXCEPT + : requiredSubgroupSize( requiredSubgroupSize_ ) {} - VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) - offsetof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) ); + return *this; + } + + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -53369,6 +73577,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& ) const = default; +#else bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -53380,35 +73592,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; void* pNext = {}; uint32_t requiredSubgroupSize = {}; + }; static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + }; + struct PipelineTessellationDomainOriginStateCreateInfo { - VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT - : domainOrigin( domainOrigin_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft) VULKAN_HPP_NOEXCEPT + : domainOrigin( domainOrigin_ ) {} - VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo ) - offsetof( PipelineTessellationDomainOriginStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PipelineTessellationDomainOriginStateCreateInfo& operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) ); return *this; } @@ -53424,6 +73653,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPipelineTessellationDomainOriginStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -53434,6 +73664,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const& ) const = default; +#else bool operator==( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -53445,31 +73679,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; const void* pNext = {}; VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft; + }; static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);

+  template <>
+  struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
+  {
+    using Type = PipelineTessellationDomainOriginStateCreateInfo;
+  };
+
   using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
+
   struct VertexInputBindingDivisorDescriptionEXT
   {
-    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = {},
-                                                                  uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
-      : binding( binding_ )
-      , divisor( divisor_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT(uint32_t binding_ = {}, uint32_t divisor_ = {}) VULKAN_HPP_NOEXCEPT
+      : binding( binding_ ), divisor( divisor_ )
     {}

+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
     VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
       *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )

-    VertexInputBindingDivisorDescriptionEXT& operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VertexInputBindingDivisorDescriptionEXT & operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>( &rhs );
+      return *this;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT & operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VertexInputBindingDivisorDescriptionEXT ) );
       return *this;
     }
@@ -53485,6 +73740,7 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

+
     operator VkVertexInputBindingDivisorDescriptionEXT const&() const VULKAN_HPP_NOEXCEPT
     {
       return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT*>( this );
@@ -53495,6 +73751,10 @@ namespace VULKAN_HPP_NAMESPACE
       return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT*>( this );
     }

+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( VertexInputBindingDivisorDescriptionEXT const& ) const = default;
+#else
     bool operator==( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( binding == rhs.binding )
@@ -53505,36 +73765,51 @@ namespace VULKAN_HPP_NAMESPACE
     {
       return !operator==( rhs );
     }
+#endif
+
+
   public:
     uint32_t binding = {};
     uint32_t divisor = {};
+
   };
   static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<VertexInputBindingDivisorDescriptionEXT>::value, "struct wrapper is not a standard layout!"
); struct PipelineVertexInputDivisorStateCreateInfoEXT { - VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = {} ) VULKAN_HPP_NOEXCEPT - : vertexBindingDivisorCount( vertexBindingDivisorCount_ ) - , pVertexBindingDivisors( pVertexBindingDivisors_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT(uint32_t vertexBindingDivisorCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = {}) VULKAN_HPP_NOEXCEPT + : vertexBindingDivisorCount( vertexBindingDivisorCount_ ), pVertexBindingDivisors( pVertexBindingDivisors_ ) {} - VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT ) - offsetof( PipelineVertexInputDivisorStateCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineVertexInputDivisorStateCreateInfoEXT& operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineVertexInputDivisorStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_ ) + : vertexBindingDivisorCount( static_cast( vertexBindingDivisors_.size() ) ), pVertexBindingDivisors( vertexBindingDivisors_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineVertexInputDivisorStateCreateInfoEXT & operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineVertexInputDivisorStateCreateInfoEXT & operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) ); return *this; } @@ -53556,6 +73831,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDivisorCount = static_cast( vertexBindingDivisors_.size() ); + pVertexBindingDivisors = vertexBindingDivisors_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPipelineVertexInputDivisorStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -53566,6 +73851,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( 
PipelineVertexInputDivisorStateCreateInfoEXT const& ) const = default; +#else bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -53578,40 +73867,59 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; const void* pNext = {}; uint32_t vertexBindingDivisorCount = {}; const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors = {}; + }; static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineVertexInputDivisorStateCreateInfoEXT; + }; + struct PipelineViewportCoarseSampleOrderStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, - uint32_t customSampleOrderCount_ = {}, - const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = {} ) VULKAN_HPP_NOEXCEPT - : sampleOrderType( sampleOrderType_ ) - , customSampleOrderCount( customSampleOrderCount_ ) - , pCustomSampleOrders( pCustomSampleOrders_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, uint32_t customSampleOrderCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = {}) VULKAN_HPP_NOEXCEPT + : sampleOrderType( sampleOrderType_ ), customSampleOrderCount( customSampleOrderCount_ ), pCustomSampleOrders( pCustomSampleOrders_ ) {} - VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV ) - offsetof( PipelineViewportCoarseSampleOrderStateCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineViewportCoarseSampleOrderStateCreateInfoNV& operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & customSampleOrders_ ) + : sampleOrderType( sampleOrderType_ ), customSampleOrderCount( static_cast( customSampleOrders_.size() ) ), pCustomSampleOrders( customSampleOrders_.data() ) + {} +#endif // 
!defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) ); return *this; } @@ -53639,6 +73947,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT + { + customSampleOrderCount = static_cast( customSampleOrders_.size() ); + pCustomSampleOrders = customSampleOrders_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -53649,6 +73967,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& ) const = default; +#else bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -53662,6 +73984,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; @@ -53669,32 +73994,50 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault; uint32_t customSampleOrderCount = {}; const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders = {}; + }; static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV; + }; + struct PipelineViewportExclusiveScissorStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ = {} ) VULKAN_HPP_NOEXCEPT - : exclusiveScissorCount( exclusiveScissorCount_ ) - , pExclusiveScissors( pExclusiveScissors_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(uint32_t exclusiveScissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ = {}) VULKAN_HPP_NOEXCEPT + : exclusiveScissorCount( exclusiveScissorCount_ ), pExclusiveScissors( pExclusiveScissors_ ) {} - VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV ) - offsetof( PipelineViewportExclusiveScissorStateCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineViewportExclusiveScissorStateCreateInfoNV& operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportExclusiveScissorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_ ) + : exclusiveScissorCount( static_cast( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) ); return *this; } @@ -53716,6 +74059,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissorCount = static_cast( exclusiveScissors_.size() ); + pExclusiveScissors = exclusiveScissors_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -53726,6 +74079,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const& ) const = default; +#else bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -53738,99 +74095,59 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; const void* pNext = {}; uint32_t exclusiveScissorCount = {}; const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors = {}; + }; static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct ShadingRatePaletteNV + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT - : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ) - , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ ) - {} - - ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ShadingRatePaletteNV& operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT - { - shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_; - return *this; - } - - ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT - { - pShadingRatePaletteEntries = pShadingRatePaletteEntries_; - return *this; - } - - operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) - && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries ); - } - - bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - uint32_t shadingRatePaletteEntryCount = {}; - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {}; + using Type = PipelineViewportExclusiveScissorStateCreateInfoNV; }; - static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PipelineViewportShadingRateImageStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ = {} ) VULKAN_HPP_NOEXCEPT - : shadingRateImageEnable( shadingRateImageEnable_ ) - , viewportCount( viewportCount_ ) - , pShadingRatePalettes( pShadingRatePalettes_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ = {}) VULKAN_HPP_NOEXCEPT + : shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( viewportCount_ ), pShadingRatePalettes( pShadingRatePalettes_ ) {} - VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV ) - offsetof( PipelineViewportShadingRateImageStateCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineViewportShadingRateImageStateCreateInfoNV& operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePalettes_ ) + : shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( static_cast( shadingRatePalettes_.size() ) ), pShadingRatePalettes( shadingRatePalettes_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineViewportShadingRateImageStateCreateInfoNV & operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) ); return *this; } @@ -53858,6 +74175,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( shadingRatePalettes_.size() ); + pShadingRatePalettes = shadingRatePalettes_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator 
VkPipelineViewportShadingRateImageStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -53868,6 +74195,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const& ) const = default; +#else bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -53881,6 +74212,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; @@ -53888,30 +74222,43 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {}; uint32_t viewportCount = {}; const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes = {}; + }; static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = PipelineViewportShadingRateImageStateCreateInfoNV; + }; + struct ViewportSwizzleNV { - VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , z( z_ ) - , w( w_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ViewportSwizzleNV(VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX) VULKAN_HPP_NOEXCEPT + : x( x_ ), y( y_ ), z( z_ ), w( w_ ) {} + VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT + ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ViewportSwizzleNV ) ); return *this; } @@ -53939,6 +74286,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkViewportSwizzleNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -53949,6 +74297,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + 
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ViewportSwizzleNV const& ) const = default; +#else bool operator==( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( x == rhs.x ) @@ -53961,40 +74313,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + }; static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PipelineViewportSwizzleStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , viewportCount( viewportCount_ ) - , pViewportSwizzles( pViewportSwizzles_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), viewportCount( viewportCount_ ), pViewportSwizzles( pViewportSwizzles_ ) {} - VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV ) - offsetof( PipelineViewportSwizzleStateCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportSwizzles_ ) + : flags( flags_ ), viewportCount( static_cast( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineViewportSwizzleStateCreateInfoNV 
& operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) ); return *this; } @@ -54022,6 +74387,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewportSwizzles_.size() ); + pViewportSwizzles = viewportSwizzles_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPipelineViewportSwizzleStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -54032,6 +74407,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const& ) const = default; +#else bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -54045,6 +74424,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; @@ -54052,93 +74434,50 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {}; uint32_t viewportCount = {}; const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles = {}; + }; static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct ViewportWScalingNV + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, - float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT - : xcoeff( xcoeff_ ) - , ycoeff( ycoeff_ ) - {} - - ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT - { - xcoeff = xcoeff_; - return *this; - } - - ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT - { - ycoeff = ycoeff_; - return *this; - } - - operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( xcoeff == rhs.xcoeff ) - && ( ycoeff == rhs.ycoeff ); - } - - bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - float xcoeff = {}; - float ycoeff = {}; + using Type = PipelineViewportSwizzleStateCreateInfoNV; }; - static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PipelineViewportWScalingStateCreateInfoNV { - VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ = {} ) VULKAN_HPP_NOEXCEPT - : viewportWScalingEnable( viewportWScalingEnable_ ) - , viewportCount( viewportCount_ ) - , pViewportWScalings( pViewportWScalings_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ = {}) VULKAN_HPP_NOEXCEPT + : viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( viewportCount_ ), pViewportWScalings( pViewportWScalings_ ) {} - VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV ) - offsetof( PipelineViewportWScalingStateCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportWScalings_ ) + : viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( static_cast( viewportWScalings_.size() ) ), pViewportWScalings( viewportWScalings_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) ); return *this; } @@ -54166,6 +74505,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewportWScalings_.size() ); + pViewportWScalings = viewportWScalings_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPipelineViewportWScalingStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -54176,6 +74525,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const& ) const = default; +#else bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -54189,6 +74542,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; @@ -54196,32 +74552,45 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {}; uint32_t viewportCount = {}; const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings = {}; + }; static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#ifdef VK_USE_PLATFORM_GGP + template <> + struct CppType + { + using Type = PipelineViewportWScalingStateCreateInfoNV; + }; +#ifdef VK_USE_PLATFORM_GGP struct PresentFrameTokenGGP { - VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {} ) VULKAN_HPP_NOEXCEPT - : frameToken( frameToken_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP(GgpFrameToken frameToken_ = {}) VULKAN_HPP_NOEXCEPT + : frameToken( frameToken_ ) {} - VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP & operator=( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP ) - offsetof( PresentFrameTokenGGP, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PresentFrameTokenGGP& operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT + PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PresentFrameTokenGGP ) ); return *this; } @@ -54237,6 +74606,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPresentFrameTokenGGP const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -54247,177 +74617,83 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PresentFrameTokenGGP const& ) const = default; +#else bool operator==( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) - && ( frameToken == rhs.frameToken ); + && ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 ); } bool operator!=( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP; const void* pNext = {}; GgpFrameToken frameToken = {}; + }; 
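Editor's note: the pattern repeated throughout this hunk (array-proxy constructors/setters, the `CppType` structure-type trait, and the optional defaulted `operator<=>`) can be exercised as below. This is an illustrative sketch only, not part of the generated header; it assumes `<vulkan/vulkan.hpp>` from the same SDK revision is on the include path and that enhanced mode is not disabled.

    // usage_sketch.cpp -- illustrative only, not generated code
    #include <array>
    #include <type_traits>
    #include <vulkan/vulkan.hpp>

    int main()
    {
      // The ArrayProxyNoTemporaries constructors added in this hunk fill both the
      // count member and the pointer member from a single container argument.
      std::array<vk::VertexInputBindingDivisorDescriptionEXT, 2> divisors = {
        vk::VertexInputBindingDivisorDescriptionEXT( 0 /*binding*/, 1 /*divisor*/ ),
        vk::VertexInputBindingDivisorDescriptionEXT( 1, 4 )
      };
      vk::PipelineVertexInputDivisorStateCreateInfoEXT divisorState( divisors );

      // The new CppType trait (whose template arguments are stripped in the diff
      // rendering above) maps a StructureType enumerant back to its wrapper type.
      static_assert(
        std::is_same_v<
          vk::CppType<vk::StructureType,
                      vk::StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT>::Type,
          vk::PipelineVertexInputDivisorStateCreateInfoEXT>,
        "CppType round-trip" );

      // Comparison uses the defaulted operator<=> when the compiler supports it,
      // otherwise the member-wise operator== fallback shown in the hunk.
      return ( divisorState == divisorState ) ? 0 : 1;
    }
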
static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_GGP*/ - struct PresentInfoKHR + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, - uint32_t swapchainCount_ = {}, - const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ = {}, - const uint32_t* pImageIndices_ = {}, - VULKAN_HPP_NAMESPACE::Result* pResults_ = {} ) VULKAN_HPP_NOEXCEPT - : waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , swapchainCount( swapchainCount_ ) - , pSwapchains( pSwapchains_ ) - , pImageIndices( pImageIndices_ ) - , pResults( pResults_ ) - {} - - VULKAN_HPP_NAMESPACE::PresentInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) - offsetof( PresentInfoKHR, pNext ) ); - return *this; - } - - PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT - { - waitSemaphoreCount = waitSemaphoreCount_; - return *this; - } - - PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT - { - pWaitSemaphores = pWaitSemaphores_; - return *this; - } - - PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT - { - swapchainCount = swapchainCount_; - return *this; - } - - PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ ) VULKAN_HPP_NOEXCEPT - { - pSwapchains = pSwapchains_; - return *this; - } - - PresentInfoKHR & setPImageIndices( const uint32_t* pImageIndices_ ) VULKAN_HPP_NOEXCEPT - { - pImageIndices = pImageIndices_; - return *this; - } - - PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result* pResults_ ) VULKAN_HPP_NOEXCEPT - { - pResults = pResults_; - return *this; - } - - operator VkPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphores == rhs.pWaitSemaphores ) - && ( swapchainCount == rhs.swapchainCount ) - && ( pSwapchains == rhs.pSwapchains ) - && ( pImageIndices == rhs.pImageIndices ) - && ( pResults == rhs.pResults ); - } - - bool operator!=( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR; - const void* pNext = {}; - uint32_t waitSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {}; - uint32_t swapchainCount = {}; - const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains = {}; - const uint32_t* pImageIndices = {}; - 
VULKAN_HPP_NAMESPACE::Result* pResults = {}; + using Type = PresentFrameTokenGGP; }; - static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_USE_PLATFORM_GGP*/ struct RectLayerKHR { - VULKAN_HPP_CONSTEXPR RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, - uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT - : offset( offset_ ) - , extent( extent_ ) - , layer( layer_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RectLayerKHR(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {}) VULKAN_HPP_NOEXCEPT + : offset( offset_ ), extent( extent_ ), layer( layer_ ) {} - explicit RectLayerKHR( Rect2D const& rect2D, - uint32_t layer_ = {} ) - : offset( rect2D.offset ) - , extent( rect2D.extent ) - , layer( layer_ ) - {} + VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - RectLayerKHR& operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT + explicit RectLayerKHR( Rect2D const& rect2D, uint32_t layer_ = {} ) + : offset( rect2D.offset ) + , extent( rect2D.extent ) + , layer( layer_ ) + {} +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT + RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RectLayerKHR ) ); + return *this; + } + + RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } - RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT + RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; @@ -54429,6 +74705,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkRectLayerKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -54439,6 +74716,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RectLayerKHR const& ) const = default; +#else bool operator==( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( offset == rhs.offset ) @@ -54450,31 +74731,51 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::Offset2D offset = {}; VULKAN_HPP_NAMESPACE::Extent2D extent = {}; uint32_t layer = {}; + }; static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PresentRegionKHR { - VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {}, - const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT - : rectangleCount( rectangleCount_ ) - , pRectangles( pRectangles_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentRegionKHR(uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ = {}) VULKAN_HPP_NOEXCEPT + : rectangleCount( rectangleCount_ ), pRectangles( pRectangles_ ) {} + VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) + : rectangleCount( static_cast( rectangles_.size() ) ), pRectangles( rectangles_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PresentRegionKHR ) ); return *this; } @@ -54490,6 +74791,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PresentRegionKHR & setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) VULKAN_HPP_NOEXCEPT + { + rectangleCount = static_cast( rectangles_.size() ); + pRectangles = rectangles_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPresentRegionKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -54500,6 +74811,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PresentRegionKHR const& ) const = default; +#else bool operator==( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( rectangleCount == rhs.rectangleCount ) @@ -54510,36 +74825,51 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t rectangleCount = {}; const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles = {}; + }; static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PresentRegionsKHR { - VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {}, - const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ = {} ) VULKAN_HPP_NOEXCEPT - : swapchainCount( swapchainCount_ ) - , pRegions( pRegions_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentRegionsKHR(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT + : swapchainCount( swapchainCount_ ), pRegions( pRegions_ ) {} - VULKAN_HPP_NAMESPACE::PresentRegionsKHR & operator=( VULKAN_HPP_NAMESPACE::PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentRegionsKHR ) - offsetof( PresentRegionsKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : swapchainCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PresentRegionsKHR ) ); return *this; } @@ -54561,6 +74891,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PresentRegionsKHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPresentRegionsKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -54571,6 +74911,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PresentRegionsKHR const& ) const = default; +#else bool operator==( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -54583,32 +74927,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR; const void* pNext = {}; uint32_t swapchainCount = {}; const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions = {}; + }; static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PresentRegionsKHR; + }; + struct PresentTimeGOOGLE { - VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {}, - uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT - : presentID( presentID_ ) - , desiredPresentTime( desiredPresentTime_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}) VULKAN_HPP_NOEXCEPT + : presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ ) {} + VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PresentTimeGOOGLE ) ); return *this; } @@ -54624,6 +74988,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkPresentTimeGOOGLE const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -54634,6 +74999,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PresentTimeGOOGLE const& ) const = default; +#else bool operator==( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( presentID == rhs.presentID ) @@ -54644,36 +75013,51 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t presentID = {}; uint64_t desiredPresentTime = {}; + }; static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct PresentTimesInfoGOOGLE { - VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {}, - const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ = {} ) VULKAN_HPP_NOEXCEPT - : swapchainCount( swapchainCount_ ) - , pTimes( pTimes_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ = {}) VULKAN_HPP_NOEXCEPT + : swapchainCount( swapchainCount_ ), pTimes( pTimes_ ) {} - VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE & operator=( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE ) - offsetof( PresentTimesInfoGOOGLE, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PresentTimesInfoGOOGLE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_ ) + : swapchainCount( static_cast( times_.size() ) ), pTimes( times_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PresentTimesInfoGOOGLE ) ); return *this; } @@ -54695,6 +75079,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + PresentTimesInfoGOOGLE & setTimes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( times_.size() ); + pTimes = times_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkPresentTimesInfoGOOGLE const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -54705,6 +75099,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PresentTimesInfoGOOGLE const& ) const = default; +#else bool operator==( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -54717,36 +75115,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE; const void* pNext = {}; uint32_t swapchainCount = {}; const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes = {}; + }; static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = PresentTimesInfoGOOGLE; + }; + struct ProtectedSubmitInfo { - VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {} ) VULKAN_HPP_NOEXCEPT - : protectedSubmit( protectedSubmit_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo(VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}) VULKAN_HPP_NOEXCEPT + : protectedSubmit( protectedSubmit_ ) {} - VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo & operator=( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo ) - offsetof( ProtectedSubmitInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ProtectedSubmitInfo ) ); return *this; } @@ -54762,6 +75177,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkProtectedSubmitInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -54772,6 +75188,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ProtectedSubmitInfo const& ) const = default; +#else bool operator==( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -54783,283 +75203,144 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo; const void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {}; + }; static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct QueryPoolCreateInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, - uint32_t queryCount_ = {}, - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , queryType( queryType_ ) - , queryCount( queryCount_ ) - , pipelineStatistics( pipelineStatistics_ ) - {} - - VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & operator=( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) - offsetof( QueryPoolCreateInfo, pNext ) ); - return *this; - } - - QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - QueryPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT - { - queryType = queryType_; - return *this; - } - - QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT - { - queryCount = queryCount_; - return *this; - } - - QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT - { - pipelineStatistics = pipelineStatistics_; - return *this; - } - - operator VkQueryPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queryType == rhs.queryType ) - && ( queryCount == rhs.queryCount ) - && ( pipelineStatistics == rhs.pipelineStatistics ); - } - - bool operator!=( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; - uint32_t queryCount = {}; - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; + using Type = ProtectedSubmitInfo; }; - static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct QueryPoolCreateInfoINTEL + struct QueryPoolPerformanceQueryCreateInfoINTEL { - VULKAN_HPP_CONSTEXPR QueryPoolCreateInfoINTEL( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT - : performanceCountersSampling( performanceCountersSampling_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL(VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual) VULKAN_HPP_NOEXCEPT + : performanceCountersSampling( performanceCountersSampling_ ) {} - VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL ) - offsetof( QueryPoolCreateInfoINTEL, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; - QueryPoolCreateInfoINTEL( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - QueryPoolCreateInfoINTEL& operator=( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - QueryPoolCreateInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) ); + return *this; + } + + QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - QueryPoolCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT { performanceCountersSampling = performanceCountersSampling_; return *this; } - operator VkQueryPoolCreateInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + + operator VkQueryPoolPerformanceQueryCreateInfoINTEL const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkQueryPoolCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT + operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - bool operator==( QueryPoolCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const& ) const = default; +#else + bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL 
const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCountersSampling == rhs.performanceCountersSampling ); } - bool operator!=( QueryPoolCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfoINTEL; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; const void* pNext = {}; VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; - }; - static_assert( sizeof( QueryPoolCreateInfoINTEL ) == sizeof( VkQueryPoolCreateInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct QueryPoolPerformanceCreateInfoKHR + }; + static_assert( sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, - uint32_t counterIndexCount_ = {}, - const uint32_t* pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT - : queueFamilyIndex( queueFamilyIndex_ ) - , counterIndexCount( counterIndexCount_ ) - , pCounterIndices( pCounterIndices_ ) - {} - - VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR ) - offsetof( QueryPoolPerformanceCreateInfoKHR, pNext ) ); - return *this; - } - - QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - QueryPoolPerformanceCreateInfoKHR& operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - QueryPoolPerformanceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT - { - queueFamilyIndex = queueFamilyIndex_; - return *this; - } - - QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT - { - counterIndexCount = counterIndexCount_; - return *this; - } - - QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t* pCounterIndices_ ) VULKAN_HPP_NOEXCEPT - { - pCounterIndices = pCounterIndices_; - return *this; - } - - operator VkQueryPoolPerformanceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ) - && ( counterIndexCount == rhs.counterIndexCount ) - && ( pCounterIndices == rhs.pCounterIndices ); - } - - 
bool operator!=( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; - const void* pNext = {}; - uint32_t queueFamilyIndex = {}; - uint32_t counterIndexCount = {}; - const uint32_t* pCounterIndices = {}; + using Type = QueryPoolPerformanceQueryCreateInfoINTEL; }; - static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; struct QueueFamilyCheckpointPropertiesNV { - QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT - : checkpointExecutionStageMask( checkpointExecutionStageMask_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV(VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {}) VULKAN_HPP_NOEXCEPT + : checkpointExecutionStageMask( checkpointExecutionStageMask_ ) {} - VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV ) - offsetof( QueueFamilyCheckpointPropertiesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - QueueFamilyCheckpointPropertiesNV& operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( QueueFamilyCheckpointPropertiesNV ) ); + return *this; + } + + operator VkQueueFamilyCheckpointPropertiesNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -55070,6 +75351,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( QueueFamilyCheckpointPropertiesNV const& ) const = default; +#else bool operator==( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -55081,438 +75366,58 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV; void* pNext = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {}; + }; static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct 
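One rename worth noting in the hunk above: QueryPoolCreateInfoINTEL becomes QueryPoolPerformanceQueryCreateInfoINTEL, following the upstream Vulkan header rename, and the old spelling is kept as a type alias so existing callers keep compiling. A hedged sketch of that compatibility surface (not part of the diff):

    // Sketch only: both spellings name the same type after this change.
    #include <vulkan/vulkan.hpp>
    #include <type_traits>

    static_assert( std::is_same<vk::QueryPoolCreateInfoINTEL,
                                vk::QueryPoolPerformanceQueryCreateInfoINTEL>::value,
                   "the legacy name is only an alias" );

    vk::QueryPoolCreateInfoINTEL makeIntelQueryPoolInfo()
    {
      // Same constructor as before; only the primary type name changed.
      return vk::QueryPoolCreateInfoINTEL( vk::QueryPoolSamplingModeINTEL::eManual );
    }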
and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct QueueFamilyProperties + template <> + struct CppType { - QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, - uint32_t queueCount_ = {}, - uint32_t timestampValidBits_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT - : queueFlags( queueFlags_ ) - , queueCount( queueCount_ ) - , timestampValidBits( timestampValidBits_ ) - , minImageTransferGranularity( minImageTransferGranularity_ ) - {} - - QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - QueueFamilyProperties& operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkQueueFamilyProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( queueFlags == rhs.queueFlags ) - && ( queueCount == rhs.queueCount ) - && ( timestampValidBits == rhs.timestampValidBits ) - && ( minImageTransferGranularity == rhs.minImageTransferGranularity ); - } - - bool operator!=( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {}; - uint32_t queueCount = {}; - uint32_t timestampValidBits = {}; - VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {}; + using Type = QueueFamilyCheckpointPropertiesNV; }; - static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct QueueFamilyProperties2 - { - QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : queueFamilyProperties( queueFamilyProperties_ ) - {} - - VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 & operator=( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 ) - offsetof( QueueFamilyProperties2, pNext ) ); - return *this; - } - - QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - QueueFamilyProperties2& operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkQueueFamilyProperties2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( queueFamilyProperties == rhs.queueFamilyProperties ); - } - - bool operator!=( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {}; - }; - static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct RayTracingShaderGroupCreateInfoNV - { - VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV::eGeneral, - uint32_t generalShader_ = {}, - uint32_t closestHitShader_ = {}, - uint32_t anyHitShader_ = {}, - uint32_t intersectionShader_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , generalShader( generalShader_ ) - , closestHitShader( closestHitShader_ ) - , anyHitShader( anyHitShader_ ) - , intersectionShader( intersectionShader_ ) - {} - - VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV ) - offsetof( RayTracingShaderGroupCreateInfoNV, pNext ) ); - return *this; - } - - RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RayTracingShaderGroupCreateInfoNV& operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT - { - generalShader = generalShader_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) 
VULKAN_HPP_NOEXCEPT - { - closestHitShader = closestHitShader_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT - { - anyHitShader = anyHitShader_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT - { - intersectionShader = intersectionShader_; - return *this; - } - - operator VkRayTracingShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( generalShader == rhs.generalShader ) - && ( closestHitShader == rhs.closestHitShader ) - && ( anyHitShader == rhs.anyHitShader ) - && ( intersectionShader == rhs.intersectionShader ); - } - - bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV::eGeneral; - uint32_t generalShader = {}; - uint32_t closestHitShader = {}; - uint32_t anyHitShader = {}; - uint32_t intersectionShader = {}; - }; - static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct RayTracingPipelineCreateInfoNV - { - VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, - uint32_t groupCount_ = {}, - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ = {}, - uint32_t maxRecursionDepth_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , groupCount( groupCount_ ) - , pGroups( pGroups_ ) - , maxRecursionDepth( maxRecursionDepth_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) - {} - - VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV ) - offsetof( RayTracingPipelineCreateInfoNV, pNext ) ); - return *this; - } - - RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RayTracingPipelineCreateInfoNV& operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - RayTracingPipelineCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT - { - stageCount = stageCount_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT - { - pStages = pStages_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT - { - groupCount = groupCount_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT - { - pGroups = pGroups_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT - { - maxRecursionDepth = maxRecursionDepth_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT - { - layout = layout_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT - { - basePipelineHandle = basePipelineHandle_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT - { - basePipelineIndex = basePipelineIndex_; - return *this; - } - - operator VkRayTracingPipelineCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) 
- && ( flags == rhs.flags ) - && ( stageCount == rhs.stageCount ) - && ( pStages == rhs.pStages ) - && ( groupCount == rhs.groupCount ) - && ( pGroups == rhs.pGroups ) - && ( maxRecursionDepth == rhs.maxRecursionDepth ) - && ( layout == rhs.layout ) - && ( basePipelineHandle == rhs.basePipelineHandle ) - && ( basePipelineIndex == rhs.basePipelineIndex ); - } - - bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - uint32_t stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; - uint32_t groupCount = {}; - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups = {}; - uint32_t maxRecursionDepth = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; - }; - static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct RefreshCycleDurationGOOGLE - { - RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT - : refreshDuration( refreshDuration_ ) - {} - - RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RefreshCycleDurationGOOGLE& operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkRefreshCycleDurationGOOGLE const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( refreshDuration == rhs.refreshDuration ); - } - - bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - uint64_t refreshDuration = {}; - }; - static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct RenderPassAttachmentBeginInfo { - VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {} ) VULKAN_HPP_NOEXCEPT - : attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo(uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {}) VULKAN_HPP_NOEXCEPT + : attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ) {} - VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo ) - offsetof( RenderPassAttachmentBeginInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - RenderPassAttachmentBeginInfo& operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) + : attachmentCount( static_cast( attachments_.size() ) ), pAttachments( attachments_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RenderPassAttachmentBeginInfo ) ); return *this; } @@ -55534,6 +75439,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassAttachmentBeginInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkRenderPassAttachmentBeginInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -55544,6 +75459,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RenderPassAttachmentBeginInfo const& ) const = default; +#else bool operator==( RenderPassAttachmentBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -55556,959 +75475,54 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo; const void* pNext = {}; uint32_t attachmentCount = {}; const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {}; + }; static_assert( sizeof( RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), "struct and wrapper have different size!" 
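The additions guarded by !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) above give RenderPassAttachmentBeginInfo a container-style constructor and a setAttachments() overload that fill attachmentCount and pAttachments in one call; ArrayProxyNoTemporaries deliberately rejects temporaries because the struct only stores a pointer into the caller's data. A usage sketch under those assumptions (the image-view array is hypothetical, not from the diff):

    // Sketch only: feeding RenderPassAttachmentBeginInfo from a container.
    #include <vulkan/vulkan.hpp>
    #include <array>

    vk::RenderPassAttachmentBeginInfo makeAttachmentBeginInfo( std::array<vk::ImageView, 2> const & views )
    {
      vk::RenderPassAttachmentBeginInfo info;
      info.setAttachments( views );   // sets both attachmentCount and pAttachments from the container
      // info keeps a pointer into views, so views must outlive every use of info
      return info;
    }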
); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct RenderPassBeginInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, - VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, - uint32_t clearValueCount_ = {}, - const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {} ) VULKAN_HPP_NOEXCEPT - : renderPass( renderPass_ ) - , framebuffer( framebuffer_ ) - , renderArea( renderArea_ ) - , clearValueCount( clearValueCount_ ) - , pClearValues( pClearValues_ ) - {} - - VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo ) - offsetof( RenderPassBeginInfo, pNext ) ); - return *this; - } - - RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT - { - renderPass = renderPass_; - return *this; - } - - RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT - { - framebuffer = framebuffer_; - return *this; - } - - RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D renderArea_ ) VULKAN_HPP_NOEXCEPT - { - renderArea = renderArea_; - return *this; - } - - RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT - { - clearValueCount = clearValueCount_; - return *this; - } - - RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT - { - pClearValues = pClearValues_; - return *this; - } - - operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( renderPass == rhs.renderPass ) - && ( framebuffer == rhs.framebuffer ) - && ( renderArea == rhs.renderArea ) - && ( clearValueCount == rhs.clearValueCount ) - && ( pClearValues == rhs.pClearValues ); - } - - bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; - VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; - uint32_t clearValueCount = {}; - const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {}; + using Type = RenderPassAttachmentBeginInfo; }; - static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct SubpassDescription - { - VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - uint32_t inputAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ = {}, - uint32_t colorAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {}, - uint32_t preserveAttachmentCount_ = {}, - const uint32_t* pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , inputAttachmentCount( inputAttachmentCount_ ) - , pInputAttachments( pInputAttachments_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachments( pColorAttachments_ ) - , pResolveAttachments( pResolveAttachments_ ) - , pDepthStencilAttachment( pDepthStencilAttachment_ ) - , preserveAttachmentCount( preserveAttachmentCount_ ) - , pPreserveAttachments( pPreserveAttachments_ ) - {} - - SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubpassDescription& operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT - { - pipelineBindPoint = pipelineBindPoint_; - return *this; - } - - SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - inputAttachmentCount = inputAttachmentCount_; - return *this; - } - - SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pInputAttachments = pInputAttachments_; - return *this; - } - - SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - colorAttachmentCount = colorAttachmentCount_; - return *this; - } - - SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pColorAttachments = pColorAttachments_; - return *this; - } - - SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pResolveAttachments = pResolveAttachments_; - return *this; - } - - SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT - { - pDepthStencilAttachment = pDepthStencilAttachment_; - return *this; - } - - SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - preserveAttachmentCount = preserveAttachmentCount_; - return *this; - } - - SubpassDescription & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pPreserveAttachments = pPreserveAttachments_; - return *this; - } - - operator VkSubpassDescription const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT - { 
- return *reinterpret_cast( this ); - } - - bool operator==( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( flags == rhs.flags ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( inputAttachmentCount == rhs.inputAttachmentCount ) - && ( pInputAttachments == rhs.pInputAttachments ) - && ( colorAttachmentCount == rhs.colorAttachmentCount ) - && ( pColorAttachments == rhs.pColorAttachments ) - && ( pResolveAttachments == rhs.pResolveAttachments ) - && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) - && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) - && ( pPreserveAttachments == rhs.pPreserveAttachments ); - } - - bool operator!=( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - uint32_t inputAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments = {}; - uint32_t colorAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment = {}; - uint32_t preserveAttachmentCount = {}; - const uint32_t* pPreserveAttachments = {}; - }; - static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SubpassDependency - { - VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = {}, - uint32_t dstSubpass_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubpass( srcSubpass_ ) - , dstSubpass( dstSubpass_ ) - , srcStageMask( srcStageMask_ ) - , dstStageMask( dstStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , dependencyFlags( dependencyFlags_ ) - {} - - SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubpassDependency& operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT - { - srcSubpass = srcSubpass_; - return *this; - } - - SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT - { - dstSubpass = dstSubpass_; - return *this; - } - - SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT - { - srcStageMask = srcStageMask_; - return *this; - } - - SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT - { - dstStageMask = dstStageMask_; - return *this; - } - - SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - srcAccessMask = srcAccessMask_; - return *this; - } - - SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - dstAccessMask = 
dstAccessMask_; - return *this; - } - - SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT - { - dependencyFlags = dependencyFlags_; - return *this; - } - - operator VkSubpassDependency const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( srcSubpass == rhs.srcSubpass ) - && ( dstSubpass == rhs.dstSubpass ) - && ( srcStageMask == rhs.srcStageMask ) - && ( dstStageMask == rhs.dstStageMask ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( dependencyFlags == rhs.dependencyFlags ); - } - - bool operator!=( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - uint32_t srcSubpass = {}; - uint32_t dstSubpass = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; - }; - static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct RenderPassCreateInfo - { - VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ = {}, - uint32_t subpassCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ = {}, - uint32_t dependencyCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , subpassCount( subpassCount_ ) - , pSubpasses( pSubpasses_ ) - , dependencyCount( dependencyCount_ ) - , pDependencies( pDependencies_ ) - {} - - VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) - offsetof( RenderPassCreateInfo, pNext ) ); - return *this; - } - - RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - RenderPassCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - attachmentCount = attachmentCount_; - return *this; - } - - RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pAttachments = pAttachments_; - return *this; - } - - RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT - { - subpassCount = subpassCount_; - return *this; - } - - 
RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ ) VULKAN_HPP_NOEXCEPT - { - pSubpasses = pSubpasses_; - return *this; - } - - RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT - { - dependencyCount = dependencyCount_; - return *this; - } - - RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ ) VULKAN_HPP_NOEXCEPT - { - pDependencies = pDependencies_; - return *this; - } - - operator VkRenderPassCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( attachmentCount == rhs.attachmentCount ) - && ( pAttachments == rhs.pAttachments ) - && ( subpassCount == rhs.subpassCount ) - && ( pSubpasses == rhs.pSubpasses ) - && ( dependencyCount == rhs.dependencyCount ) - && ( pDependencies == rhs.pDependencies ); - } - - bool operator!=( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments = {}; - uint32_t subpassCount = {}; - const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses = {}; - uint32_t dependencyCount = {}; - const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies = {}; - }; - static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct SubpassDescription2 - { - VULKAN_HPP_CONSTEXPR SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - uint32_t viewMask_ = {}, - uint32_t inputAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ = {}, - uint32_t colorAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {}, - uint32_t preserveAttachmentCount_ = {}, - const uint32_t* pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , viewMask( viewMask_ ) - , inputAttachmentCount( inputAttachmentCount_ ) - , pInputAttachments( pInputAttachments_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachments( pColorAttachments_ ) - , pResolveAttachments( pResolveAttachments_ ) - , pDepthStencilAttachment( pDepthStencilAttachment_ ) - , preserveAttachmentCount( preserveAttachmentCount_ ) - , pPreserveAttachments( pPreserveAttachments_ ) - {} - - VULKAN_HPP_NAMESPACE::SubpassDescription2 & operator=( VULKAN_HPP_NAMESPACE::SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription2 ) - offsetof( SubpassDescription2, pNext ) ); - return *this; - } - - SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubpassDescription2& operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SubpassDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT - { - pipelineBindPoint = pipelineBindPoint_; - return *this; - } - - SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT - { - viewMask = viewMask_; - return *this; - } - - SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - inputAttachmentCount = inputAttachmentCount_; - return *this; - } - - SubpassDescription2 & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pInputAttachments = pInputAttachments_; - return *this; - } - - SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - colorAttachmentCount = colorAttachmentCount_; - return *this; - } - - SubpassDescription2 & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pColorAttachments = pColorAttachments_; - return *this; - } - - SubpassDescription2 & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pResolveAttachments = pResolveAttachments_; - return *this; - } - - SubpassDescription2 & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT - { - 
pDepthStencilAttachment = pDepthStencilAttachment_; - return *this; - } - - SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - preserveAttachmentCount = preserveAttachmentCount_; - return *this; - } - - SubpassDescription2 & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pPreserveAttachments = pPreserveAttachments_; - return *this; - } - - operator VkSubpassDescription2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( viewMask == rhs.viewMask ) - && ( inputAttachmentCount == rhs.inputAttachmentCount ) - && ( pInputAttachments == rhs.pInputAttachments ) - && ( colorAttachmentCount == rhs.colorAttachmentCount ) - && ( pColorAttachments == rhs.pColorAttachments ) - && ( pResolveAttachments == rhs.pResolveAttachments ) - && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) - && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) - && ( pPreserveAttachments == rhs.pPreserveAttachments ); - } - - bool operator!=( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - uint32_t viewMask = {}; - uint32_t inputAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments = {}; - uint32_t colorAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment = {}; - uint32_t preserveAttachmentCount = {}; - const uint32_t* pPreserveAttachments = {}; - }; - static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct SubpassDependency2 - { - VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {}, - uint32_t dstSubpass_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, - int32_t viewOffset_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubpass( srcSubpass_ ) - , dstSubpass( dstSubpass_ ) - , srcStageMask( srcStageMask_ ) - , dstStageMask( dstStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , dependencyFlags( dependencyFlags_ ) - , viewOffset( viewOffset_ ) - {} - - VULKAN_HPP_NAMESPACE::SubpassDependency2 & operator=( VULKAN_HPP_NAMESPACE::SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency2 ) - offsetof( SubpassDependency2, pNext ) ); - return *this; - } - - SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubpassDependency2& operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SubpassDependency2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT - { - srcSubpass = srcSubpass_; - return *this; - } - - SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT - { - dstSubpass = dstSubpass_; - return *this; - } - - SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT - { - srcStageMask = srcStageMask_; - return *this; - } - - SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT - { - dstStageMask = dstStageMask_; - return *this; - } - - SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - srcAccessMask = srcAccessMask_; - return *this; - } - - SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - dstAccessMask = dstAccessMask_; - return *this; - } - - SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT - { - dependencyFlags = dependencyFlags_; - return *this; - } - - SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT - { - viewOffset = viewOffset_; - return *this; - } - - operator VkSubpassDependency2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcSubpass == rhs.srcSubpass ) - && ( dstSubpass == rhs.dstSubpass ) - && ( srcStageMask == rhs.srcStageMask ) - && ( dstStageMask == rhs.dstStageMask ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( dependencyFlags == rhs.dependencyFlags ) - && ( viewOffset == rhs.viewOffset ); - } - - bool operator!=( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2; - const void* pNext = {}; - uint32_t srcSubpass = {}; - uint32_t dstSubpass = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; - int32_t viewOffset = {}; - }; - static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct RenderPassCreateInfo2 - { - VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ = {}, - uint32_t subpassCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ = {}, - uint32_t dependencyCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ = {}, - uint32_t correlatedViewMaskCount_ = {}, - const uint32_t* pCorrelatedViewMasks_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , subpassCount( subpassCount_ ) - , pSubpasses( pSubpasses_ ) - , dependencyCount( dependencyCount_ ) - , pDependencies( pDependencies_ ) - , correlatedViewMaskCount( correlatedViewMaskCount_ ) - , pCorrelatedViewMasks( pCorrelatedViewMasks_ ) - {} - - VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & operator=( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 ) - offsetof( RenderPassCreateInfo2, pNext ) ); - return *this; - } - - RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RenderPassCreateInfo2& operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - RenderPassCreateInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - attachmentCount = attachmentCount_; - return *this; - } - - RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pAttachments = pAttachments_; - return *this; - } - - RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT - { - subpassCount = subpassCount_; - return *this; - } - - RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ ) VULKAN_HPP_NOEXCEPT - { - pSubpasses = pSubpasses_; - return *this; - } - - RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT - { - dependencyCount = dependencyCount_; - return *this; - } - - RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ ) VULKAN_HPP_NOEXCEPT - { - pDependencies = pDependencies_; - return *this; - } - - RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t 
correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      correlatedViewMaskCount = correlatedViewMaskCount_;
-      return *this;
-    }
-
-    RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pCorrelatedViewMasks = pCorrelatedViewMasks_;
-      return *this;
-    }
-
-    operator VkRenderPassCreateInfo2 const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkRenderPassCreateInfo2*>( this );
-    }
-
-    operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkRenderPassCreateInfo2*>( this );
-    }
-
-    bool operator==( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( attachmentCount == rhs.attachmentCount )
-          && ( pAttachments == rhs.pAttachments )
-          && ( subpassCount == rhs.subpassCount )
-          && ( pSubpasses == rhs.pSubpasses )
-          && ( dependencyCount == rhs.dependencyCount )
-          && ( pDependencies == rhs.pDependencies )
-          && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount )
-          && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
-    }
-
-    bool operator!=( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
-    uint32_t attachmentCount = {};
-    const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments = {};
-    uint32_t subpassCount = {};
-    const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses = {};
-    uint32_t dependencyCount = {};
-    const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies = {};
-    uint32_t correlatedViewMaskCount = {};
-    const uint32_t* pCorrelatedViewMasks = {};
-  };
-  static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
+  using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;

   struct RenderPassFragmentDensityMapCreateInfoEXT
   {
-    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
-      : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}) VULKAN_HPP_NOEXCEPT
+    : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
     {}

-    VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT ) - offsetof( RenderPassFragmentDensityMapCreateInfoEXT, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

     RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
       *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )

-    RenderPassFragmentDensityMapCreateInfoEXT& operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) );
       return *this;
     }
@@ -56518,12 +75532,13 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
     {
       fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
       return *this;
     }

+
     operator VkRenderPassFragmentDensityMapCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
     {
       return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
@@ -56534,6 +75549,10 @@ namespace VULKAN_HPP_NAMESPACE
       return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
     }

+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const& ) const = default;
+#else
     bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
@@ -56545,37 +75564,58 @@ namespace VULKAN_HPP_NAMESPACE
     {
       return !operator==( rhs );
     }
+#endif
+
+
   public:
     const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
     const void* pNext = {};
     VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {};
+
   };
   static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof(
VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = RenderPassFragmentDensityMapCreateInfoEXT; + }; + struct RenderPassInputAttachmentAspectCreateInfo { - VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {}, - const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectReferenceCount( aspectReferenceCount_ ) - , pAspectReferences( pAspectReferences_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(uint32_t aspectReferenceCount_ = {}, const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ = {}) VULKAN_HPP_NOEXCEPT + : aspectReferenceCount( aspectReferenceCount_ ), pAspectReferences( pAspectReferences_ ) {} - VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo ) - offsetof( RenderPassInputAttachmentAspectCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassInputAttachmentAspectCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & aspectReferences_ ) + : aspectReferenceCount( static_cast( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RenderPassInputAttachmentAspectCreateInfo ) ); return *this; } @@ -56597,6 +75637,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT + { + aspectReferenceCount = static_cast( aspectReferences_.size() ); + pAspectReferences = aspectReferences_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkRenderPassInputAttachmentAspectCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -56607,6 +75657,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const& ) const = default; +#else bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -56619,46 +75673,60 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; const void* pNext = {}; uint32_t aspectReferenceCount = {}; const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences = {}; + }; static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = RenderPassInputAttachmentAspectCreateInfo; + }; + using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; + struct RenderPassMultiviewCreateInfo { - VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {}, - const uint32_t* pViewMasks_ = {}, - uint32_t dependencyCount_ = {}, - const int32_t* pViewOffsets_ = {}, - uint32_t correlationMaskCount_ = {}, - const uint32_t* pCorrelationMasks_ = {} ) VULKAN_HPP_NOEXCEPT - : subpassCount( subpassCount_ ) - , pViewMasks( pViewMasks_ ) - , dependencyCount( dependencyCount_ ) - , pViewOffsets( pViewOffsets_ ) - , correlationMaskCount( correlationMaskCount_ ) - , pCorrelationMasks( pCorrelationMasks_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo(uint32_t subpassCount_ = {}, const uint32_t* pViewMasks_ = {}, uint32_t dependencyCount_ = {}, const int32_t* pViewOffsets_ = {}, uint32_t correlationMaskCount_ = {}, const uint32_t* pCorrelationMasks_ = {}) VULKAN_HPP_NOEXCEPT + : subpassCount( subpassCount_ ), pViewMasks( pViewMasks_ ), dependencyCount( dependencyCount_ ), pViewOffsets( pViewOffsets_ ), correlationMaskCount( correlationMaskCount_ ), pCorrelationMasks( pCorrelationMasks_ ) {} - VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo ) - offsetof( RenderPassMultiviewCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ = {} ) + : subpassCount( static_cast( viewMasks_.size() ) ), pViewMasks( viewMasks_.data() ), dependencyCount( static_cast( viewOffsets_.size() ) ), pViewOffsets( viewOffsets_.data() ), correlationMaskCount( 
static_cast( correlationMasks_.size() ) ), pCorrelationMasks( correlationMasks_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RenderPassMultiviewCreateInfo ) ); return *this; } @@ -56680,6 +75748,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( viewMasks_.size() ); + pViewMasks = viewMasks_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT { dependencyCount = dependencyCount_; @@ -56692,6 +75769,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( viewOffsets_.size() ); + pViewOffsets = viewOffsets_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT { correlationMaskCount = correlationMaskCount_; @@ -56704,6 +75790,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassMultiviewCreateInfo & setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT + { + correlationMaskCount = static_cast( correlationMasks_.size() ); + pCorrelationMasks = correlationMasks_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkRenderPassMultiviewCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -56714,6 +75810,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RenderPassMultiviewCreateInfo const& ) const = default; +#else bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -56730,6 +75830,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; @@ -56740,26 +75843,44 @@ namespace VULKAN_HPP_NAMESPACE const int32_t* pViewOffsets = {}; uint32_t correlationMaskCount = {}; const uint32_t* pCorrelationMasks = {}; + }; static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassMultiviewCreateInfo>
+  {
+    using Type = RenderPassMultiviewCreateInfo;
+  };
+
   using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
+
   struct SubpassSampleLocationsEXT
   {
-    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {},
-                                                    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
-      : subpassIndex( subpassIndex_ )
-      , sampleLocationsInfo( sampleLocationsInfo_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(uint32_t subpassIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
+    : subpassIndex( subpassIndex_ ), sampleLocationsInfo( sampleLocationsInfo_ )
     {}

+    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
     SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
       *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )

-    SubpassSampleLocationsEXT& operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>( &rhs );
+      return *this;
+    }
+
+    SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassSampleLocationsEXT ) );
       return *this;
     }
@@ -56769,12 +75890,13 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
     {
       sampleLocationsInfo = sampleLocationsInfo_;
       return *this;
     }

+
     operator VkSubpassSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT
     {
       return *reinterpret_cast<const VkSubpassSampleLocationsEXT*>( this );
@@ -56785,6 +75907,10 @@ namespace VULKAN_HPP_NAMESPACE
       return *reinterpret_cast<VkSubpassSampleLocationsEXT*>( this );
     }

+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubpassSampleLocationsEXT const& ) const = default;
+#else
     bool operator==( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( subpassIndex == rhs.subpassIndex )
@@ -56795,40 +75921,51 @@ namespace VULKAN_HPP_NAMESPACE
     {
       return !operator==( rhs );
     }
+#endif
+
+
   public:
     uint32_t subpassIndex = {};
     VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
+
   };
   static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!"
); struct RenderPassSampleLocationsBeginInfoEXT { - VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = {}, - uint32_t postSubpassSampleLocationsCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT - : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ) - , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ) - , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ) - , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT(uint32_t attachmentInitialSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = {}, uint32_t postSubpassSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT + : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ), pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ), postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ), pPostSubpassSampleLocations( pPostSubpassSampleLocations_ ) {} - VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT & operator=( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT ) - offsetof( RenderPassSampleLocationsBeginInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassSampleLocationsBeginInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentInitialSampleLocations_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & postSubpassSampleLocations_ = {} ) + : attachmentInitialSampleLocationsCount( static_cast( attachmentInitialSampleLocations_.size() ) ), pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ), postSubpassSampleLocationsCount( static_cast( postSubpassSampleLocations_.size() ) ), pPostSubpassSampleLocations( postSubpassSampleLocations_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RenderPassSampleLocationsBeginInfoEXT ) ); return *this; } @@ -56850,6 
+75987,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + attachmentInitialSampleLocationsCount = static_cast( attachmentInitialSampleLocations_.size() ); + pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT { postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_; @@ -56862,6 +76008,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + postSubpassSampleLocationsCount = static_cast( postSubpassSampleLocations_.size() ); + pPostSubpassSampleLocations = postSubpassSampleLocations_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkRenderPassSampleLocationsBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -56872,6 +76028,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const& ) const = default; +#else bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -56886,6 +76046,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; @@ -56894,245 +76057,209 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations = {}; uint32_t postSubpassSampleLocationsCount = {}; const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations = {}; + }; static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct SamplerCreateInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR SamplerCreateInfo( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, - VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, - VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, - float mipLodBias_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, - float maxAnisotropy_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, - VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, - float minLod_ = {}, - float maxLod_ = {}, - VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, - VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , magFilter( magFilter_ ) - , minFilter( minFilter_ ) - , mipmapMode( mipmapMode_ ) - , addressModeU( addressModeU_ ) - , addressModeV( addressModeV_ ) - , addressModeW( addressModeW_ ) - , mipLodBias( mipLodBias_ ) - , anisotropyEnable( anisotropyEnable_ ) - , maxAnisotropy( maxAnisotropy_ ) - , compareEnable( compareEnable_ ) - , compareOp( compareOp_ ) - , minLod( minLod_ ) - , maxLod( maxLod_ ) - , borderColor( borderColor_ ) - , unnormalizedCoordinates( unnormalizedCoordinates_ ) + using Type = RenderPassSampleLocationsBeginInfoEXT; + }; + + struct RenderPassTransformBeginInfoQCOM + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity) VULKAN_HPP_NOEXCEPT + : transform( transform_ ) {} - VULKAN_HPP_NAMESPACE::SamplerCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) - offsetof( SamplerCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SamplerCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( RenderPassTransformBeginInfoQCOM ) ); + 
return *this; + } + + RenderPassTransformBeginInfoQCOM & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + transform = transform_; return *this; } - SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT + + operator VkRenderPassTransformBeginInfoQCOM const&() const VULKAN_HPP_NOEXCEPT { - magFilter = magFilter_; - return *this; + return *reinterpret_cast( this ); } - SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT + operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT { - minFilter = minFilter_; - return *this; + return *reinterpret_cast( this ); } - SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT - { - mipmapMode = mipmapMode_; - return *this; - } - SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT - { - addressModeU = addressModeU_; - return *this; - } - - SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT - { - addressModeV = addressModeV_; - return *this; - } - - SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT - { - addressModeW = addressModeW_; - return *this; - } - - SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT - { - mipLodBias = mipLodBias_; - return *this; - } - - SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT - { - anisotropyEnable = anisotropyEnable_; - return *this; - } - - SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT - { - maxAnisotropy = maxAnisotropy_; - return *this; - } - - SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT - { - compareEnable = compareEnable_; - return *this; - } - - SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT - { - compareOp = compareOp_; - return *this; - } - - SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT - { - minLod = minLod_; - return *this; - } - - SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT - { - maxLod = maxLod_; - return *this; - } - - SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT - { - borderColor = borderColor_; - return *this; - } - - SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT - { - unnormalizedCoordinates = unnormalizedCoordinates_; - return *this; - } - - operator VkSamplerCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RenderPassTransformBeginInfoQCOM const& ) const = default; +#else + bool operator==( RenderPassTransformBeginInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( magFilter == rhs.magFilter ) - && ( minFilter == rhs.minFilter ) - && ( mipmapMode == rhs.mipmapMode ) - && ( addressModeU == rhs.addressModeU ) - && ( addressModeV == rhs.addressModeV ) - && ( addressModeW == rhs.addressModeW ) - && ( mipLodBias == rhs.mipLodBias ) - && ( anisotropyEnable == rhs.anisotropyEnable ) - && ( maxAnisotropy == rhs.maxAnisotropy ) - && ( compareEnable == rhs.compareEnable ) - && ( compareOp == rhs.compareOp ) - && ( minLod == rhs.minLod ) - && ( maxLod == rhs.maxLod ) - && ( borderColor == rhs.borderColor ) - && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); + && ( transform == rhs.transform ); } - bool operator!=( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassTransformBeginInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; - VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; - VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest; - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; - float mipLodBias = {}; - VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {}; - float maxAnisotropy = {}; - VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {}; - VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; - float minLod = {}; - float maxLod = {}; - VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack; - VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {}; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + + }; + static_assert( sizeof( RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassTransformBeginInfoQCOM>
+  {
+    using Type = RenderPassTransformBeginInfoQCOM;
+  };
+
+  struct SamplerCustomBorderColorCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    SamplerCustomBorderColorCreateInfoEXT(VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined) VULKAN_HPP_NOEXCEPT
+    : customBorderColor( customBorderColor_ ), format( format_ )
+    {}
+
+    SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerCustomBorderColorCreateInfoEXT ) );
+      return *this;
+    }
+
+    SamplerCustomBorderColorCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerCustomBorderColorCreateInfoEXT & setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      customBorderColor = customBorderColor_;
+      return *this;
+    }
+
+    SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+
+    operator VkSamplerCustomBorderColorCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT*>( this );
+    }
+
+    operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT*>( this );
+    }
+
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+
+  };
+  static_assert( sizeof( SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerCustomBorderColorCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerCustomBorderColorCreateInfoEXT>
+  {
+    using Type = SamplerCustomBorderColorCreateInfoEXT;
   };
-  static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SamplerCreateInfo>::value, "struct wrapper is not a standard layout!"
); struct SamplerReductionModeCreateInfo { - VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage ) VULKAN_HPP_NOEXCEPT - : reductionMode( reductionMode_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo(VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage) VULKAN_HPP_NOEXCEPT + : reductionMode( reductionMode_ ) {} - VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo ) - offsetof( SamplerReductionModeCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SamplerReductionModeCreateInfo& operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SamplerReductionModeCreateInfo ) ); return *this; } @@ -57148,6 +76275,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSamplerReductionModeCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -57158,6 +76286,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SamplerReductionModeCreateInfo const& ) const = default; +#else bool operator==( SamplerReductionModeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -57169,173 +76301,57 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo; const void* pNext = {}; VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage; + }; static_assert( sizeof( SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct SamplerYcbcrConversionCreateInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, - VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, - VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, - VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, - VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, - VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, - VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {} ) VULKAN_HPP_NOEXCEPT - : format( format_ ) - , ycbcrModel( ycbcrModel_ ) - , ycbcrRange( ycbcrRange_ ) - , components( components_ ) - , xChromaOffset( xChromaOffset_ ) - , yChromaOffset( yChromaOffset_ ) - , chromaFilter( chromaFilter_ ) - , forceExplicitReconstruction( forceExplicitReconstruction_ ) - {} - - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo ) - offsetof( SamplerYcbcrConversionCreateInfo, pNext ) ); - return *this; - } - - SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SamplerYcbcrConversionCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT - { - format = format_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT - { - ycbcrModel = ycbcrModel_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT - { - ycbcrRange = ycbcrRange_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping components_ ) VULKAN_HPP_NOEXCEPT - { - components = components_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT - { - xChromaOffset = xChromaOffset_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT - { - yChromaOffset = yChromaOffset_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT - { - chromaFilter = chromaFilter_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT - { - forceExplicitReconstruction = forceExplicitReconstruction_; - return *this; - } - - operator VkSamplerYcbcrConversionCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - 
return *reinterpret_cast( this ); - } - - operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( ycbcrModel == rhs.ycbcrModel ) - && ( ycbcrRange == rhs.ycbcrRange ) - && ( components == rhs.components ) - && ( xChromaOffset == rhs.xChromaOffset ) - && ( yChromaOffset == rhs.yChromaOffset ) - && ( chromaFilter == rhs.chromaFilter ) - && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); - } - - bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; - VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; - VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; - VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; - VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; - VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; - VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {}; + using Type = SamplerReductionModeCreateInfo; }; - static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo; struct SamplerYcbcrConversionImageFormatProperties { - SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT - : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(uint32_t combinedImageSamplerDescriptorCount_ = {}) VULKAN_HPP_NOEXCEPT + : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ ) {} - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties & operator=( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties ) - offsetof( SamplerYcbcrConversionImageFormatProperties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SamplerYcbcrConversionImageFormatProperties& operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SamplerYcbcrConversionImageFormatProperties ) ); + return *this; + } + + operator VkSamplerYcbcrConversionImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -57346,6 +76362,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SamplerYcbcrConversionImageFormatProperties const& ) const = default; +#else bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -57357,35 +76377,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties; void* pNext = {}; uint32_t combinedImageSamplerDescriptorCount = {}; + }; static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = SamplerYcbcrConversionImageFormatProperties; + }; + using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; + struct SamplerYcbcrConversionInfo { - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {} ) VULKAN_HPP_NOEXCEPT - : conversion( conversion_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}) VULKAN_HPP_NOEXCEPT + : conversion( conversion_ ) {} - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo ) - offsetof( SamplerYcbcrConversionInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SamplerYcbcrConversionInfo ) ); return *this; } @@ -57401,6 +76439,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSamplerYcbcrConversionInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -57411,6 +76450,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SamplerYcbcrConversionInfo const& ) const = default; +#else bool operator==( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -57422,330 +76465,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo; const void* pNext = {}; VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {}; + }; static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct SemaphoreCreateInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - {} - - VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) - offsetof( SemaphoreCreateInfo, pNext ) ); - return *this; - } - - SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - operator VkSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); - } - - bool operator!=( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {}; + using Type = SamplerYcbcrConversionInfo; }; - static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct SemaphoreGetFdInfoKHR - { - VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : semaphore( semaphore_ ) - , handleType( handleType_ ) - {} - - VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR ) - offsetof( SemaphoreGetFdInfoKHR, pNext ) ); - return *this; - } - - SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SemaphoreGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT - { - semaphore = semaphore_; - return *this; - } - - SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - operator VkSemaphoreGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; - }; - static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - struct SemaphoreGetWin32HandleInfoKHR - { - VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : semaphore( semaphore_ ) - , handleType( handleType_ ) - {} - - VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR ) - offsetof( SemaphoreGetWin32HandleInfoKHR, pNext ) ); - return *this; - } - - SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SemaphoreGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT - { - semaphore = semaphore_; - return *this; - } - - SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - operator VkSemaphoreGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; - }; - static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct SemaphoreSignalInfo - { - VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - uint64_t value_ = {} ) VULKAN_HPP_NOEXCEPT - : semaphore( semaphore_ ) - , value( value_ ) - {} - - VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo ) - offsetof( SemaphoreSignalInfo, pNext ) ); - return *this; - } - - SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SemaphoreSignalInfo& operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SemaphoreSignalInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT - { - semaphore = semaphore_; - return *this; - } - - SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT - { - value = value_; - return *this; - } - - operator VkSemaphoreSignalInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( value == rhs.value ); - } - - bool operator!=( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - uint64_t value = {}; - }; - static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; struct SemaphoreTypeCreateInfo { - VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, - uint64_t initialValue_ = {} ) VULKAN_HPP_NOEXCEPT - : semaphoreType( semaphoreType_ ) - , initialValue( initialValue_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, uint64_t initialValue_ = {}) VULKAN_HPP_NOEXCEPT + : semaphoreType( semaphoreType_ ), initialValue( initialValue_ ) {} - VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo ) - offsetof( SemaphoreTypeCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SemaphoreTypeCreateInfo& operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SemaphoreTypeCreateInfo ) ); return *this; } @@ -57767,6 +76533,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSemaphoreTypeCreateInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -57777,6 +76544,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SemaphoreTypeCreateInfo const& ) const = default; +#else bool operator==( SemaphoreTypeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -57789,216 +76560,124 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo; const void* pNext = {}; VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary; uint64_t initialValue = {}; + }; static_assert( sizeof( SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct SemaphoreWaitInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, - uint32_t semaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ = {}, - const uint64_t* pValues_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , semaphoreCount( semaphoreCount_ ) - , pSemaphores( pSemaphores_ ) - , pValues( pValues_ ) + using Type = SemaphoreTypeCreateInfo; + }; + using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; + + struct SetStateFlagsIndirectCommandNV + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV(uint32_t data_ = {}) VULKAN_HPP_NOEXCEPT + : data( data_ ) {} - VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo ) - offsetof( SemaphoreWaitInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SemaphoreWaitInfo& operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SemaphoreWaitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + memcpy( static_cast( this ), &rhs, sizeof( SetStateFlagsIndirectCommandNV ) ); return *this; } - SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT + SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + data = data_; return *this; } - SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT + + operator VkSetStateFlagsIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT { - semaphoreCount = semaphoreCount_; - return *this; + return *reinterpret_cast( this ); } - SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ ) VULKAN_HPP_NOEXCEPT + operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { - pSemaphores = pSemaphores_; - return *this; + return *reinterpret_cast( this ); } - SemaphoreWaitInfo & setPValues( const uint64_t* pValues_ ) VULKAN_HPP_NOEXCEPT + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SetStateFlagsIndirectCommandNV const& ) const = default; +#else + bool operator==( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT { - pValues = pValues_; - return *this; + return ( data == rhs.data ); } - operator VkSemaphoreWaitInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( semaphoreCount == rhs.semaphoreCount 
) - && ( pSemaphores == rhs.pSemaphores ) - && ( pValues == rhs.pValues ); - } - - bool operator!=( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {}; - uint32_t semaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores = {}; - const uint64_t* pValues = {}; + uint32_t data = {}; + }; - static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ShaderModuleCreateInfo - { - VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, - size_t codeSize_ = {}, - const uint32_t* pCode_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , codeSize( codeSize_ ) - , pCode( pCode_ ) - {} - - VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) - offsetof( ShaderModuleCreateInfo, pNext ) ); - return *this; - } - - ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ShaderModuleCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT - { - codeSize = codeSize_; - return *this; - } - - ShaderModuleCreateInfo & setPCode( const uint32_t* pCode_ ) VULKAN_HPP_NOEXCEPT - { - pCode = pCode_; - return *this; - } - - operator VkShaderModuleCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( codeSize == rhs.codeSize ) - && ( pCode == rhs.pCode ); - } - - bool operator!=( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {}; - size_t codeSize = {}; - const uint32_t* pCode = {}; - }; - static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ShaderModuleValidationCacheCreateInfoEXT { - VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT - : validationCache( validationCache_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}) VULKAN_HPP_NOEXCEPT + : validationCache( validationCache_ ) {} - VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT ) - offsetof( ShaderModuleValidationCacheCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) ); return *this; } @@ -58014,6 +76693,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkShaderModuleValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -58024,6 +76704,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const& ) const = default; +#else bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -58035,40 +76719,55 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; const void* pNext = {}; VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {}; + }; static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = ShaderModuleValidationCacheCreateInfoEXT; + }; + struct ShaderResourceUsageAMD { - ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {}, - uint32_t numUsedSgprs_ = {}, - uint32_t ldsSizePerLocalWorkGroup_ = {}, - size_t ldsUsageSizeInBytes_ = {}, - size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT - : numUsedVgprs( numUsedVgprs_ ) - , numUsedSgprs( numUsedSgprs_ ) - , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ) - , ldsUsageSizeInBytes( ldsUsageSizeInBytes_ ) - , scratchMemUsageInBytes( scratchMemUsageInBytes_ ) + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD(uint32_t numUsedVgprs_ = {}, uint32_t numUsedSgprs_ = {}, uint32_t ldsSizePerLocalWorkGroup_ = {}, size_t ldsUsageSizeInBytes_ = {}, size_t scratchMemUsageInBytes_ = {}) VULKAN_HPP_NOEXCEPT + : numUsedVgprs( numUsedVgprs_ ), numUsedSgprs( numUsedSgprs_ ), ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ), ldsUsageSizeInBytes( ldsUsageSizeInBytes_ ), scratchMemUsageInBytes( scratchMemUsageInBytes_ ) {} + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ShaderResourceUsageAMD& operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ShaderResourceUsageAMD ) ); + return *this; + } + + operator VkShaderResourceUsageAMD const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -58079,6 +76778,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ShaderResourceUsageAMD const& ) const = default; +#else bool operator==( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( numUsedVgprs == rhs.numUsedVgprs ) @@ -58092,6 +76795,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: uint32_t numUsedVgprs = {}; @@ -58099,41 +76805,41 @@ namespace VULKAN_HPP_NAMESPACE uint32_t ldsSizePerLocalWorkGroup = {}; size_t ldsUsageSizeInBytes = {}; size_t scratchMemUsageInBytes = {}; + }; static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ShaderStatisticsInfoAMD { - ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, - VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, - uint32_t numPhysicalVgprs_ = {}, - uint32_t numPhysicalSgprs_ = {}, - uint32_t numAvailableVgprs_ = {}, - uint32_t numAvailableSgprs_ = {}, - std::array const& computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderStageMask( shaderStageMask_ ) - , resourceUsage( resourceUsage_ ) - , numPhysicalVgprs( numPhysicalVgprs_ ) - , numPhysicalSgprs( numPhysicalSgprs_ ) - , numAvailableVgprs( numAvailableVgprs_ ) - , numAvailableSgprs( numAvailableSgprs_ ) - , computeWorkGroupSize{} - { - VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( computeWorkGroupSize, computeWorkGroupSize_ ); - } + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD(VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, uint32_t numPhysicalVgprs_ = {}, uint32_t numPhysicalSgprs_ = {}, uint32_t numAvailableVgprs_ = {}, uint32_t numAvailableSgprs_ = {}, std::array const& computeWorkGroupSize_ = {}) VULKAN_HPP_NOEXCEPT + : shaderStageMask( shaderStageMask_ ), resourceUsage( resourceUsage_ ), numPhysicalVgprs( numPhysicalVgprs_ ), numPhysicalSgprs( numPhysicalSgprs_ ), numAvailableVgprs( numAvailableVgprs_ ), numAvailableSgprs( numAvailableSgprs_ ), computeWorkGroupSize( computeWorkGroupSize_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ShaderStatisticsInfoAMD& operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ShaderStatisticsInfoAMD ) ); + return *this; + } + + operator VkShaderStatisticsInfoAMD const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -58144,6 +76850,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ShaderStatisticsInfoAMD const& ) const = default; +#else bool operator==( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( shaderStageMask == rhs.shaderStageMask ) @@ -58152,13 +76862,16 @@ namespace VULKAN_HPP_NAMESPACE && ( numPhysicalSgprs == rhs.numPhysicalSgprs ) && ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == rhs.numAvailableSgprs ) - && ( memcmp( computeWorkGroupSize, rhs.computeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ); + && ( computeWorkGroupSize == rhs.computeWorkGroupSize ); } bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {}; @@ -58167,34 +76880,43 @@ namespace VULKAN_HPP_NAMESPACE uint32_t numPhysicalSgprs = {}; uint32_t numAvailableVgprs = {}; uint32_t numAvailableSgprs = {}; - uint32_t computeWorkGroupSize[3] = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D computeWorkGroupSize = {}; + }; 
static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SharedPresentSurfaceCapabilitiesKHR { - SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT - : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT + : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ ) {} - VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR & operator=( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) - offsetof( SharedPresentSurfaceCapabilitiesKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SharedPresentSurfaceCapabilitiesKHR& operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SharedPresentSurfaceCapabilitiesKHR ) ); + return *this; + } + + operator VkSharedPresentSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -58205,6 +76927,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const& ) const = default; +#else bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -58216,255 +76942,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; void* pNext = {}; VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {}; + }; static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct SparseImageFormatProperties + template <> + struct CppType { - SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, - VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , imageGranularity( imageGranularity_ ) - , flags( flags_ ) - {} - - SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SparseImageFormatProperties& operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkSparseImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( aspectMask == rhs.aspectMask ) - && ( imageGranularity == rhs.imageGranularity ) - && ( flags == rhs.flags ); - } - - bool operator!=( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; - VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; + using Type = SharedPresentSurfaceCapabilitiesKHR; }; - static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SparseImageFormatProperties2 - { - SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT - : properties( properties_ ) - {} - - VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 & operator=( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 ) - offsetof( SparseImageFormatProperties2, pNext ) ); - return *this; - } - - SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SparseImageFormatProperties2& operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkSparseImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( properties == rhs.properties ); - } - - bool operator!=( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {}; - }; - static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct SparseImageMemoryRequirements - { - SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, - uint32_t imageMipTailFirstLod_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT - : formatProperties( formatProperties_ ) - , imageMipTailFirstLod( imageMipTailFirstLod_ ) - , imageMipTailSize( imageMipTailSize_ ) - , imageMipTailOffset( imageMipTailOffset_ ) - , imageMipTailStride( imageMipTailStride_ ) - {} - - SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SparseImageMemoryRequirements& operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkSparseImageMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( formatProperties == rhs.formatProperties ) - && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) - && ( imageMipTailSize == rhs.imageMipTailSize ) - && ( imageMipTailOffset == rhs.imageMipTailOffset ) - && ( imageMipTailStride == rhs.imageMipTailStride ); - } - - bool operator!=( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {}; - uint32_t imageMipTailFirstLod = {}; - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {}; - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {}; - }; - static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct SparseImageMemoryRequirements2 - { - SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryRequirements( memoryRequirements_ ) - {} - - VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 & operator=( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) - offsetof( SparseImageMemoryRequirements2, pNext ) ); - return *this; - } - - SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SparseImageMemoryRequirements2& operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkSparseImageMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryRequirements == rhs.memoryRequirements ); - } - - bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {}; - }; - static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); #ifdef VK_USE_PLATFORM_GGP - struct StreamDescriptorSurfaceCreateInfoGGP { - VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, - GgpStreamDescriptor streamDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , streamDescriptor( streamDescriptor_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP(VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, GgpStreamDescriptor streamDescriptor_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), streamDescriptor( streamDescriptor_ ) {} - VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & operator=( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP ) - offsetof( StreamDescriptorSurfaceCreateInfoGGP, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - StreamDescriptorSurfaceCreateInfoGGP& operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( StreamDescriptorSurfaceCreateInfoGGP ) ); return *this; } @@ -58486,6 +77010,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkStreamDescriptorSurfaceCreateInfoGGP const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -58496,243 +77021,70 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( StreamDescriptorSurfaceCreateInfoGGP const& ) const = default; +#else bool operator==( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) - && ( streamDescriptor == rhs.streamDescriptor ); + && ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 ); } bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; const void* pNext = {}; VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {}; GgpStreamDescriptor streamDescriptor = {}; + }; static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = StreamDescriptorSurfaceCreateInfoGGP; + }; #endif /*VK_USE_PLATFORM_GGP*/ - struct SubmitInfo - { - VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ = {}, - uint32_t commandBufferCount_ = {}, - const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ = {}, - uint32_t signalSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT - : waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , pWaitDstStageMask( pWaitDstStageMask_ ) - , commandBufferCount( commandBufferCount_ ) - , pCommandBuffers( pCommandBuffers_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphores( pSignalSemaphores_ ) - {} - - VULKAN_HPP_NAMESPACE::SubmitInfo & operator=( VULKAN_HPP_NAMESPACE::SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) - offsetof( SubmitInfo, pNext ) ); - return *this; - } - - SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubmitInfo& operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT - { - waitSemaphoreCount = waitSemaphoreCount_; - return *this; - } - - SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT - { - pWaitSemaphores = pWaitSemaphores_; - return *this; - } - - SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT - { - pWaitDstStageMask = pWaitDstStageMask_; - return *this; - } - - SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT - { - commandBufferCount = commandBufferCount_; - return *this; - } - - SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT - { - pCommandBuffers = pCommandBuffers_; - return *this; - } - - SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT - { - signalSemaphoreCount = signalSemaphoreCount_; - return *this; - } - - SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT - { - pSignalSemaphores = pSignalSemaphores_; - return *this; - } - - operator VkSubmitInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphores == rhs.pWaitSemaphores ) - && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) - && ( commandBufferCount == rhs.commandBufferCount ) - && ( pCommandBuffers == rhs.pCommandBuffers ) - && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) - && ( pSignalSemaphores == rhs.pSignalSemaphores ); - } - - bool operator!=( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; - const void* pNext = {}; - uint32_t waitSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {}; - const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask = {}; - uint32_t commandBufferCount = {}; - const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers = {}; - uint32_t signalSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {}; - }; - static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SubpassBeginInfo - { - VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT - : contents( contents_ ) - {} - - VULKAN_HPP_NAMESPACE::SubpassBeginInfo & operator=( VULKAN_HPP_NAMESPACE::SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassBeginInfo ) - offsetof( SubpassBeginInfo, pNext ) ); - return *this; - } - - SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubpassBeginInfo& operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT - { - contents = contents_; - return *this; - } - - operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( contents == rhs.contents ); - } - - bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline; - }; - static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct SubpassDescriptionDepthStencilResolve { - VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, - VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ = {} ) VULKAN_HPP_NOEXCEPT - : depthResolveMode( depthResolveMode_ ) - , stencilResolveMode( stencilResolveMode_ ) - , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve(VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ = {}) VULKAN_HPP_NOEXCEPT + : depthResolveMode( depthResolveMode_ ), stencilResolveMode( stencilResolveMode_ ), pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ ) {} - VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve & operator=( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve ) - offsetof( SubpassDescriptionDepthStencilResolve, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SubpassDescriptionDepthStencilResolve& operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SubpassDescriptionDepthStencilResolve ) ); return *this; } @@ -58760,6 +77112,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSubpassDescriptionDepthStencilResolve const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -58770,6 +77123,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubpassDescriptionDepthStencilResolve const& ) const = default; +#else bool operator==( SubpassDescriptionDepthStencilResolve const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -58783,6 +77140,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve; @@ -58790,313 +77150,46 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = 
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment = {}; + }; static_assert( sizeof( SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct SubpassEndInfo + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT - {} - - VULKAN_HPP_NAMESPACE::SubpassEndInfo & operator=( VULKAN_HPP_NAMESPACE::SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassEndInfo ) - offsetof( SubpassEndInfo, pNext ) ); - return *this; - } - - SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubpassEndInfo& operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ); - } - - bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo; - const void* pNext = {}; + using Type = SubpassDescriptionDepthStencilResolve; }; - static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct SurfaceCapabilities2EXT - { - SurfaceCapabilities2EXT( uint32_t minImageCount_ = {}, - uint32_t maxImageCount_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, - uint32_t maxImageArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT - : minImageCount( minImageCount_ ) - , maxImageCount( maxImageCount_ ) - , currentExtent( currentExtent_ ) - , minImageExtent( minImageExtent_ ) - , maxImageExtent( maxImageExtent_ ) - , maxImageArrayLayers( maxImageArrayLayers_ ) - , supportedTransforms( supportedTransforms_ ) - , currentTransform( currentTransform_ ) - , supportedCompositeAlpha( supportedCompositeAlpha_ ) - , supportedUsageFlags( supportedUsageFlags_ ) - , supportedSurfaceCounters( supportedSurfaceCounters_ ) - {} - - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT ) - offsetof( SurfaceCapabilities2EXT, pNext ) ); - return *this; - } - - SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SurfaceCapabilities2EXT& operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkSurfaceCapabilities2EXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( minImageCount == rhs.minImageCount ) - && ( maxImageCount == rhs.maxImageCount ) - && ( currentExtent == rhs.currentExtent ) - && ( minImageExtent == rhs.minImageExtent ) - && ( maxImageExtent == rhs.maxImageExtent ) - && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) - && ( supportedTransforms == rhs.supportedTransforms ) - && ( currentTransform == rhs.currentTransform ) - && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) - && ( supportedUsageFlags == rhs.supportedUsageFlags ) - && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters ); - } - - bool operator!=( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT; - void* pNext = {}; - uint32_t minImageCount = {}; - uint32_t maxImageCount = {}; - VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; - uint32_t maxImageArrayLayers = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - 
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; - VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {}; - }; - static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SurfaceCapabilitiesKHR - { - SurfaceCapabilitiesKHR( uint32_t minImageCount_ = {}, - uint32_t maxImageCount_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, - uint32_t maxImageArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT - : minImageCount( minImageCount_ ) - , maxImageCount( maxImageCount_ ) - , currentExtent( currentExtent_ ) - , minImageExtent( minImageExtent_ ) - , maxImageExtent( maxImageExtent_ ) - , maxImageArrayLayers( maxImageArrayLayers_ ) - , supportedTransforms( supportedTransforms_ ) - , currentTransform( currentTransform_ ) - , supportedCompositeAlpha( supportedCompositeAlpha_ ) - , supportedUsageFlags( supportedUsageFlags_ ) - {} - - SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SurfaceCapabilitiesKHR& operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( minImageCount == rhs.minImageCount ) - && ( maxImageCount == rhs.maxImageCount ) - && ( currentExtent == rhs.currentExtent ) - && ( minImageExtent == rhs.minImageExtent ) - && ( maxImageExtent == rhs.maxImageExtent ) - && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) - && ( supportedTransforms == rhs.supportedTransforms ) - && ( currentTransform == rhs.currentTransform ) - && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) - && ( supportedUsageFlags == rhs.supportedUsageFlags ); - } - - bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - uint32_t minImageCount = {}; - uint32_t maxImageCount = {}; - VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; - uint32_t maxImageArrayLayers = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; - }; - static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different 
size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SurfaceCapabilities2KHR - { - SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {} ) VULKAN_HPP_NOEXCEPT - : surfaceCapabilities( surfaceCapabilities_ ) - {} - - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & operator=( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR ) - offsetof( SurfaceCapabilities2KHR, pNext ) ); - return *this; - } - - SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SurfaceCapabilities2KHR& operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - operator VkSurfaceCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( surfaceCapabilities == rhs.surfaceCapabilities ); - } - - bool operator!=( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {}; - }; - static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; #ifdef VK_USE_PLATFORM_WIN32_KHR - struct SurfaceCapabilitiesFullScreenExclusiveEXT { - VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {} ) VULKAN_HPP_NOEXCEPT - : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}) VULKAN_HPP_NOEXCEPT + : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ ) {} - VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT ) - offsetof( SurfaceCapabilitiesFullScreenExclusiveEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SurfaceCapabilitiesFullScreenExclusiveEXT& operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) ); return *this; } @@ -59112,6 +77205,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -59122,6 +77216,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const& ) const = default; +#else bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -59133,138 +77231,54 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {}; + }; static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT>
+  {
+    using Type = SurfaceCapabilitiesFullScreenExclusiveEXT;
+  };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/

-  struct SurfaceFormatKHR
-  {
-    SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
-                      VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
-      : format( format_ )
-      , colorSpace( colorSpace_ )
-    {}
-
-    SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SurfaceFormatKHR& operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>(&rhs);
-      return *this;
-    }
-
-    operator VkSurfaceFormatKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkSurfaceFormatKHR*>( this );
-    }
-
-    operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkSurfaceFormatKHR*>( this );
-    }
-
-    bool operator==( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( format == rhs.format )
-          && ( colorSpace == rhs.colorSpace );
-    }
-
-    bool operator!=( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
-    VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
-  };
-  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
-
-  struct SurfaceFormat2KHR
-  {
-    SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {} ) VULKAN_HPP_NOEXCEPT
-      : surfaceFormat( surfaceFormat_ )
-    {}
-
-    VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR & operator=( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) - offsetof( SurfaceFormat2KHR, pNext ) );
-      return *this;
-    }
-
-    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SurfaceFormat2KHR& operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>(&rhs);
-      return *this;
-    }
-
-    operator VkSurfaceFormat2KHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkSurfaceFormat2KHR*>( this );
-    }
-
-    operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkSurfaceFormat2KHR*>( this );
-    }
-
-    bool operator==( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( surfaceFormat == rhs.surfaceFormat );
-    }
-
-    bool operator!=( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
-  };
-  static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!"
); - #ifdef VK_USE_PLATFORM_WIN32_KHR - struct SurfaceFullScreenExclusiveInfoEXT { - VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT - : fullScreenExclusive( fullScreenExclusive_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT(VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault) VULKAN_HPP_NOEXCEPT + : fullScreenExclusive( fullScreenExclusive_ ) {} - VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT ) - offsetof( SurfaceFullScreenExclusiveInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SurfaceFullScreenExclusiveInfoEXT& operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SurfaceFullScreenExclusiveInfoEXT ) ); return *this; } @@ -59280,6 +77294,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSurfaceFullScreenExclusiveInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -59290,6 +77305,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const& ) const = default; +#else bool operator==( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -59301,38 +77320,54 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; void* pNext = {}; VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault; + }; static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SurfaceFullScreenExclusiveInfoEXT; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - struct SurfaceFullScreenExclusiveWin32InfoEXT { - VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {} ) VULKAN_HPP_NOEXCEPT - : hmonitor( hmonitor_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT(HMONITOR hmonitor_ = {}) VULKAN_HPP_NOEXCEPT + : hmonitor( hmonitor_ ) {} - VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT ) - offsetof( SurfaceFullScreenExclusiveWin32InfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SurfaceFullScreenExclusiveWin32InfoEXT& operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) ); return *this; } @@ -59348,6 +77383,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSurfaceFullScreenExclusiveWin32InfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -59358,6 +77394,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const& ) const = default; +#else bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -59369,36 +77409,53 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; const void* pNext = {}; HMONITOR hmonitor = {}; + }; static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = SurfaceFullScreenExclusiveWin32InfoEXT; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct SurfaceProtectedCapabilitiesKHR { - VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {} ) VULKAN_HPP_NOEXCEPT - : supportsProtected( supportsProtected_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR(VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}) VULKAN_HPP_NOEXCEPT + : supportsProtected( supportsProtected_ ) {} - VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR & operator=( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR ) - offsetof( SurfaceProtectedCapabilitiesKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SurfaceProtectedCapabilitiesKHR& operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SurfaceProtectedCapabilitiesKHR ) ); return *this; } @@ -59414,6 +77471,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSurfaceProtectedCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -59424,6 +77482,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SurfaceProtectedCapabilitiesKHR const& ) const = default; +#else bool operator==( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -59435,35 +77497,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; const void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {}; + }; static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = SurfaceProtectedCapabilitiesKHR; + }; + struct SwapchainCounterCreateInfoEXT { - VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT - : surfaceCounters( surfaceCounters_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT(VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}) VULKAN_HPP_NOEXCEPT + : surfaceCounters( surfaceCounters_ ) {} - VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT ) - offsetof( SwapchainCounterCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SwapchainCounterCreateInfoEXT ) ); return *this; } @@ -59479,6 +77558,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSwapchainCounterCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -59489,6 +77569,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SwapchainCounterCreateInfoEXT const& ) const = default; +#else bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -59500,250 +77584,52 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; const void* pNext = {}; VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {}; + }; static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct SwapchainCreateInfoKHR + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, - uint32_t minImageCount_ = {}, - VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, - VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, - uint32_t imageArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t* pQueueFamilyIndices_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, - VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, - VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, - VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , surface( surface_ ) - , minImageCount( minImageCount_ ) - , imageFormat( imageFormat_ ) - , imageColorSpace( imageColorSpace_ ) - , imageExtent( imageExtent_ ) - , imageArrayLayers( imageArrayLayers_ ) - , imageUsage( imageUsage_ ) - , imageSharingMode( imageSharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) - , preTransform( preTransform_ ) - , compositeAlpha( compositeAlpha_ ) - , presentMode( presentMode_ ) - , clipped( clipped_ ) - , oldSwapchain( oldSwapchain_ ) - {} - - VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR ) - offsetof( SwapchainCreateInfoKHR, pNext ) ); - return *this; - } - - SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT - { - surface = surface_; - return *this; - } - - SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT - { - minImageCount = minImageCount_; - return *this; - } - - SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT - { - imageFormat = imageFormat_; - return *this; - } - - SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT - { - imageColorSpace = imageColorSpace_; - return *this; - } - - SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ ) VULKAN_HPP_NOEXCEPT - { - imageExtent = imageExtent_; - return *this; - } - - SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t 
imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT - { - imageArrayLayers = imageArrayLayers_; - return *this; - } - - SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT - { - imageUsage = imageUsage_; - return *this; - } - - SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT - { - imageSharingMode = imageSharingMode_; - return *this; - } - - SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT - { - queueFamilyIndexCount = queueFamilyIndexCount_; - return *this; - } - - SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT - { - pQueueFamilyIndices = pQueueFamilyIndices_; - return *this; - } - - SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT - { - preTransform = preTransform_; - return *this; - } - - SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT - { - compositeAlpha = compositeAlpha_; - return *this; - } - - SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT - { - presentMode = presentMode_; - return *this; - } - - SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT - { - clipped = clipped_; - return *this; - } - - SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT - { - oldSwapchain = oldSwapchain_; - return *this; - } - - operator VkSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( surface == rhs.surface ) - && ( minImageCount == rhs.minImageCount ) - && ( imageFormat == rhs.imageFormat ) - && ( imageColorSpace == rhs.imageColorSpace ) - && ( imageExtent == rhs.imageExtent ) - && ( imageArrayLayers == rhs.imageArrayLayers ) - && ( imageUsage == rhs.imageUsage ) - && ( imageSharingMode == rhs.imageSharingMode ) - && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) - && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) - && ( preTransform == rhs.preTransform ) - && ( compositeAlpha == rhs.compositeAlpha ) - && ( presentMode == rhs.presentMode ) - && ( clipped == rhs.clipped ) - && ( oldSwapchain == rhs.oldSwapchain ); - } - - bool operator!=( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; - uint32_t minImageCount = {}; - VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; - VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; - uint32_t imageArrayLayers = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; - VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = 
VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; - uint32_t queueFamilyIndexCount = {}; - const uint32_t* pQueueFamilyIndices = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque; - VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; - VULKAN_HPP_NAMESPACE::Bool32 clipped = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {}; + using Type = SwapchainCounterCreateInfoEXT; }; - static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SwapchainDisplayNativeHdrCreateInfoAMD { - VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {} ) VULKAN_HPP_NOEXCEPT - : localDimmingEnable( localDimmingEnable_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}) VULKAN_HPP_NOEXCEPT + : localDimmingEnable( localDimmingEnable_ ) {} - VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD ) - offsetof( SwapchainDisplayNativeHdrCreateInfoAMD, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - SwapchainDisplayNativeHdrCreateInfoAMD& operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) ); return *this; } @@ -59759,6 +77645,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkSwapchainDisplayNativeHdrCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -59769,6 +77656,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const& ) const = default; +#else bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -59780,38 +77671,56 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; const void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {}; + }; static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = SwapchainDisplayNativeHdrCreateInfoAMD; + }; + struct TextureLODGatherFormatPropertiesAMD { - TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {} ) VULKAN_HPP_NOEXCEPT - : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD(VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {}) VULKAN_HPP_NOEXCEPT + : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ ) {} - VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD & operator=( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD ) - offsetof( TextureLODGatherFormatPropertiesAMD, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - TextureLODGatherFormatPropertiesAMD& operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } + TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( TextureLODGatherFormatPropertiesAMD ) ); + return *this; + } + + operator VkTextureLODGatherFormatPropertiesAMD const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -59822,6 +77731,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( TextureLODGatherFormatPropertiesAMD const& ) const = default; +#else bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -59833,41 +77746,58 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD; void* pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {}; + }; static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = TextureLODGatherFormatPropertiesAMD; + }; + struct TimelineSemaphoreSubmitInfo { - VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {}, - const uint64_t* pWaitSemaphoreValues_ = {}, - uint32_t signalSemaphoreValueCount_ = {}, - const uint64_t* pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT - : waitSemaphoreValueCount( waitSemaphoreValueCount_ ) - , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) - , signalSemaphoreValueCount( signalSemaphoreValueCount_ ) - , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo(uint32_t waitSemaphoreValueCount_ = {}, const uint64_t* pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValueCount_ = {}, const uint64_t* pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT + : waitSemaphoreValueCount( waitSemaphoreValueCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValueCount( signalSemaphoreValueCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ ) {} - VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo & operator=( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo ) - offsetof( TimelineSemaphoreSubmitInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - TimelineSemaphoreSubmitInfo& operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {} ) + : waitSemaphoreValueCount( static_cast( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValueCount( static_cast( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( TimelineSemaphoreSubmitInfo ) ); return *this; } @@ -59889,6 +77819,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValueCount = static_cast( waitSemaphoreValues_.size() ); + pWaitSemaphoreValues = waitSemaphoreValues_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT { 
signalSemaphoreValueCount = signalSemaphoreValueCount_; @@ -59901,6 +77840,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValueCount = static_cast( signalSemaphoreValues_.size() ); + pSignalSemaphoreValues = signalSemaphoreValues_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkTimelineSemaphoreSubmitInfo const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -59911,6 +77860,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( TimelineSemaphoreSubmitInfo const& ) const = default; +#else bool operator==( TimelineSemaphoreSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -59925,6 +77878,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo; @@ -59933,121 +77889,145 @@ namespace VULKAN_HPP_NAMESPACE const uint64_t* pWaitSemaphoreValues = {}; uint32_t signalSemaphoreValueCount = {}; const uint64_t* pSignalSemaphoreValues = {}; + }; static_assert( sizeof( TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct ValidationCacheCreateInfoEXT + template <> + struct CppType { - VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, - size_t initialDataSize_ = {}, - const void* pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , initialDataSize( initialDataSize_ ) - , pInitialData( pInitialData_ ) + using Type = TimelineSemaphoreSubmitInfo; + }; + using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; + +#ifdef VK_ENABLE_BETA_EXTENSIONS + struct TraceRaysIndirectCommandKHR + { + + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT + : width( width_ ), height( height_ ), depth( depth_ ) {} - VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT ) - offsetof( ValidationCacheCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + explicit TraceRaysIndirectCommandKHR( Extent2D const& extent2D, uint32_t depth_ = {} ) + : width( extent2D.width ) + , height( extent2D.height ) + , depth( depth_ ) + {} +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this 
= *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + memcpy( static_cast( this ), &rhs, sizeof( TraceRaysIndirectCommandKHR ) ); return *this; } - ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + width = width_; return *this; } - ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT + TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT { - initialDataSize = initialDataSize_; + height = height_; return *this; } - ValidationCacheCreateInfoEXT & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT + TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT { - pInitialData = pInitialData_; + depth = depth_; return *this; } - operator VkValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + + operator VkTraceRaysIndirectCommandKHR const&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( TraceRaysIndirectCommandKHR const& ) const = default; +#else + bool operator==( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( initialDataSize == rhs.initialDataSize ) - && ( pInitialData == rhs.pInitialData ); + return ( width == rhs.width ) + && ( height == rhs.height ) + && ( depth == rhs.depth ); } - bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {}; - size_t initialDataSize = {}; - const void* pInitialData = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t depth = {}; + }; - static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct ValidationFeaturesEXT { - VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = {}, - const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = {}, - uint32_t disabledValidationFeatureCount_ = {}, - const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : enabledValidationFeatureCount( enabledValidationFeatureCount_ ) - , pEnabledValidationFeatures( pEnabledValidationFeatures_ ) - , disabledValidationFeatureCount( disabledValidationFeatureCount_ ) - , pDisabledValidationFeatures( pDisabledValidationFeatures_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT(uint32_t enabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = {}, uint32_t disabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = {}) VULKAN_HPP_NOEXCEPT + : enabledValidationFeatureCount( enabledValidationFeatureCount_ ), pEnabledValidationFeatures( pEnabledValidationFeatures_ ), disabledValidationFeatureCount( disabledValidationFeatureCount_ ), pDisabledValidationFeatures( pDisabledValidationFeatures_ ) {} - VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT ) - offsetof( ValidationFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - ValidationFeaturesEXT& operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ValidationFeaturesEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & enabledValidationFeatures_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationFeatures_ = {} ) + : enabledValidationFeatureCount( static_cast( enabledValidationFeatures_.size() ) ), pEnabledValidationFeatures( enabledValidationFeatures_.data() ), disabledValidationFeatureCount( static_cast( disabledValidationFeatures_.size() ) ), pDisabledValidationFeatures( disabledValidationFeatures_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ValidationFeaturesEXT ) ); return *this; } @@ -60069,6 +78049,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ValidationFeaturesEXT & setEnabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + enabledValidationFeatureCount = static_cast( enabledValidationFeatures_.size() ); + pEnabledValidationFeatures = 
enabledValidationFeatures_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT { disabledValidationFeatureCount = disabledValidationFeatureCount_; @@ -60081,6 +78070,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ValidationFeaturesEXT & setDisabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationFeatureCount = static_cast( disabledValidationFeatures_.size() ); + pDisabledValidationFeatures = disabledValidationFeatures_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkValidationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -60091,6 +78090,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ValidationFeaturesEXT const& ) const = default; +#else bool operator==( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -60105,6 +78108,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT; @@ -60113,32 +78119,50 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures = {}; uint32_t disabledValidationFeatureCount = {}; const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures = {}; + }; static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + template <> + struct CppType + { + using Type = ValidationFeaturesEXT; + }; + struct ValidationFlagsEXT { - VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = {}, - const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ = {} ) VULKAN_HPP_NOEXCEPT - : disabledValidationCheckCount( disabledValidationCheckCount_ ) - , pDisabledValidationChecks( pDisabledValidationChecks_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT(uint32_t disabledValidationCheckCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ = {}) VULKAN_HPP_NOEXCEPT + : disabledValidationCheckCount( disabledValidationCheckCount_ ), pDisabledValidationChecks( pDisabledValidationChecks_ ) {} - VULKAN_HPP_NAMESPACE::ValidationFlagsEXT & operator=( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT ) - offsetof( ValidationFlagsEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ValidationFlagsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationChecks_ ) + : disabledValidationCheckCount( static_cast( disabledValidationChecks_.size() ) ), pDisabledValidationChecks( disabledValidationChecks_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ValidationFlagsEXT ) ); return *this; } @@ -60160,6 +78184,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + ValidationFlagsEXT & setDisabledValidationChecks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationCheckCount = static_cast( disabledValidationChecks_.size() ); + pDisabledValidationChecks = disabledValidationChecks_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkValidationFlagsEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -60170,6 +78204,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ValidationFlagsEXT const& ) const = default; +#else bool operator==( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -60182,40 +78220,54 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT; const void* pNext = {}; uint32_t disabledValidationCheckCount = {}; const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks = {}; 
+ }; static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#ifdef VK_USE_PLATFORM_VI_NN + template <> + struct CppType + { + using Type = ValidationFlagsEXT; + }; +#ifdef VK_USE_PLATFORM_VI_NN struct ViSurfaceCreateInfoNN { - VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, - void* window_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , window( window_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN(VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void* window_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), window( window_ ) {} - VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & operator=( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN ) - offsetof( ViSurfaceCreateInfoNN, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( ViSurfaceCreateInfoNN ) ); return *this; } @@ -60237,6 +78289,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkViSurfaceCreateInfoNN const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -60247,6 +78300,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ViSurfaceCreateInfoNN const& ) const = default; +#else bool operator==( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -60259,43 +78316,55 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN; const void* pNext = {}; VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {}; void* window = {}; + }; static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ViSurfaceCreateInfoNN; + }; #endif /*VK_USE_PLATFORM_VI_NN*/ #ifdef VK_USE_PLATFORM_WAYLAND_KHR - struct WaylandSurfaceCreateInfoKHR { - VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, - struct wl_display* display_ = {}, - struct wl_surface* surface_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , display( display_ ) - , surface( surface_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, struct wl_display* display_ = {}, struct wl_surface* surface_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), display( display_ ), surface( surface_ ) {} - VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR ) - offsetof( WaylandSurfaceCreateInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) ); return *this; } @@ -60323,6 +78392,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkWaylandSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -60333,6 +78403,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( WaylandSurfaceCreateInfoKHR const& ) const = default; +#else bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -60346,6 +78420,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR; @@ -60353,45 +78430,80 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {}; struct wl_display* display = {}; struct wl_surface* surface = {}; + }; static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = WaylandSurfaceCreateInfoKHR; + }; #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - struct Win32KeyedMutexAcquireReleaseInfoKHR { - VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {}, - const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, - const uint64_t* pAcquireKeys_ = {}, - const uint32_t* pAcquireTimeouts_ = {}, - uint32_t releaseCount_ = {}, - const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {}, - const uint64_t* pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT - : acquireCount( acquireCount_ ) - , pAcquireSyncs( pAcquireSyncs_ ) - , pAcquireKeys( pAcquireKeys_ ) - , pAcquireTimeouts( pAcquireTimeouts_ ) - , releaseCount( releaseCount_ ) - , pReleaseSyncs( pReleaseSyncs_ ) - , pReleaseKeys( pReleaseKeys_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, const uint64_t* pAcquireKeys_ = {}, const uint32_t* pAcquireTimeouts_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {}, const uint64_t* pReleaseKeys_ = {}) VULKAN_HPP_NOEXCEPT + : acquireCount( acquireCount_ ), pAcquireSyncs( pAcquireSyncs_ ), pAcquireKeys( pAcquireKeys_ ), pAcquireTimeouts( pAcquireTimeouts_ ), releaseCount( releaseCount_ ), pReleaseSyncs( pReleaseSyncs_ ), pReleaseKeys( pReleaseKeys_ ) {} - VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR ) - offsetof( Win32KeyedMutexAcquireReleaseInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeouts_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ = {} ) + : acquireCount( static_cast( acquireSyncs_.size() ) ), pAcquireSyncs( acquireSyncs_.data() ), pAcquireKeys( acquireKeys_.data() ), pAcquireTimeouts( acquireTimeouts_.data() ), releaseCount( static_cast( releaseSyncs_.size() ) ), pReleaseSyncs( releaseSyncs_.data() ), pReleaseKeys( releaseKeys_.data() ) { - *this = *reinterpret_cast(&rhs); +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() ); + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() ); + VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() ); +#else + if ( acquireSyncs_.size() != acquireKeys_.size() ) + { + throw LogicError( 
VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" ); + } + if ( acquireSyncs_.size() != acquireTimeouts_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" ); + } + if ( acquireKeys_.size() != acquireTimeouts_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() ); +#else + if ( releaseSyncs_.size() != releaseKeys_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) ); return *this; } @@ -60413,18 +78525,45 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireSyncs_.size() ); + pAcquireSyncs = acquireSyncs_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT { pAcquireKeys = pAcquireKeys_; return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireKeys_.size() ); + pAcquireKeys = acquireKeys_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT { pAcquireTimeouts = pAcquireTimeouts_; return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireTimeouts_.size() ); + pAcquireTimeouts = acquireTimeouts_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT { releaseCount = releaseCount_; @@ -60437,12 +78576,31 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = 
static_cast( releaseSyncs_.size() ); + pReleaseSyncs = releaseSyncs_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT { pReleaseKeys = pReleaseKeys_; return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseKeys_.size() ); + pReleaseKeys = releaseKeys_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkWin32KeyedMutexAcquireReleaseInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -60453,6 +78611,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const& ) const = default; +#else bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -60470,6 +78632,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; @@ -60481,45 +78646,80 @@ namespace VULKAN_HPP_NAMESPACE uint32_t releaseCount = {}; const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {}; const uint64_t* pReleaseKeys = {}; + }; static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = Win32KeyedMutexAcquireReleaseInfoKHR; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - struct Win32KeyedMutexAcquireReleaseInfoNV { - VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {}, - const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, - const uint64_t* pAcquireKeys_ = {}, - const uint32_t* pAcquireTimeoutMilliseconds_ = {}, - uint32_t releaseCount_ = {}, - const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {}, - const uint64_t* pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT - : acquireCount( acquireCount_ ) - , pAcquireSyncs( pAcquireSyncs_ ) - , pAcquireKeys( pAcquireKeys_ ) - , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ) - , releaseCount( releaseCount_ ) - , pReleaseSyncs( pReleaseSyncs_ ) - , pReleaseKeys( pReleaseKeys_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, const uint64_t* pAcquireKeys_ = {}, const uint32_t* pAcquireTimeoutMilliseconds_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {}, const uint64_t* pReleaseKeys_ = {}) VULKAN_HPP_NOEXCEPT + : acquireCount( acquireCount_ ), pAcquireSyncs( pAcquireSyncs_ ), pAcquireKeys( pAcquireKeys_ ), pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ), releaseCount( releaseCount_ ), pReleaseSyncs( pReleaseSyncs_ ), pReleaseKeys( pReleaseKeys_ ) {} - VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV & operator=( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV ) - offsetof( Win32KeyedMutexAcquireReleaseInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeoutMilliseconds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ = {} ) + : acquireCount( static_cast( acquireSyncs_.size() ) ), pAcquireSyncs( acquireSyncs_.data() ), pAcquireKeys( acquireKeys_.data() ), pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() ), releaseCount( static_cast( releaseSyncs_.size() ) ), pReleaseSyncs( releaseSyncs_.data() ), pReleaseKeys( releaseKeys_.data() ) { - *this = *reinterpret_cast(&rhs); +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() ); + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() ); + VULKAN_HPP_ASSERT( acquireKeys_.size() == 
acquireTimeoutMilliseconds_.size() ); +#else + if ( acquireSyncs_.size() != acquireKeys_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" ); + } + if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" ); + } + if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() ); +#else + if ( releaseSyncs_.size() != releaseKeys_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + Win32KeyedMutexAcquireReleaseInfoNV & operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) ); return *this; } @@ -60541,18 +78741,45 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireSyncs_.size() ); + pAcquireSyncs = acquireSyncs_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT { pAcquireKeys = pAcquireKeys_; return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireKeys_.size() ); + pAcquireKeys = acquireKeys_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT { pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_; return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireTimeoutMilliseconds_.size() ); + pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT { releaseCount = releaseCount_; @@ -60565,12 +78792,31 @@ namespace 
VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseSyncs_.size() ); + pReleaseSyncs = releaseSyncs_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT { pReleaseKeys = pReleaseKeys_; return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseKeys_.size() ); + pReleaseKeys = releaseKeys_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkWin32KeyedMutexAcquireReleaseInfoNV const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -60581,6 +78827,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const& ) const = default; +#else bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -60598,6 +78848,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; @@ -60609,37 +78862,46 @@ namespace VULKAN_HPP_NAMESPACE uint32_t releaseCount = {}; const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {}; const uint64_t* pReleaseKeys = {}; + }; static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = Win32KeyedMutexAcquireReleaseInfoNV; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - struct Win32SurfaceCreateInfoKHR { - VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, - HINSTANCE hinstance_ = {}, - HWND hwnd_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , hinstance( hinstance_ ) - , hwnd( hwnd_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, HINSTANCE hinstance_ = {}, HWND hwnd_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), hinstance( hinstance_ ), hwnd( hwnd_ ) {} - VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR ) - offsetof( Win32SurfaceCreateInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( Win32SurfaceCreateInfoKHR ) ); return *this; } @@ -60667,6 +78929,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkWin32SurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -60677,6 +78940,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Win32SurfaceCreateInfoKHR const& ) const = default; +#else bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -60690,6 +78957,9 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR; @@ -60697,200 +78967,97 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {}; HINSTANCE hinstance = {}; HWND hwnd = {}; + }; static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = Win32SurfaceCreateInfoKHR; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct WriteDescriptorSet + struct WriteDescriptorSetAccelerationStructureKHR { - VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, - uint32_t dstBinding_ = {}, - uint32_t dstArrayElement_ = {}, - uint32_t descriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {}, - const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {} ) VULKAN_HPP_NOEXCEPT - : dstSet( dstSet_ ) - , dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - , descriptorCount( descriptorCount_ ) - , descriptorType( descriptorType_ ) - , pImageInfo( pImageInfo_ ) - , pBufferInfo( pBufferInfo_ ) - , pTexelBufferView( pTexelBufferView_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ = {}) VULKAN_HPP_NOEXCEPT + : accelerationStructureCount( accelerationStructureCount_ ), pAccelerationStructures( pAccelerationStructures_ ) {} - VULKAN_HPP_NAMESPACE::WriteDescriptorSet & operator=( VULKAN_HPP_NAMESPACE::WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSet ) - offsetof( WriteDescriptorSet, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + WriteDescriptorSetAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & accelerationStructures_ ) + : accelerationStructureCount( static_cast( accelerationStructures_.size() ) ), pAccelerationStructures( accelerationStructures_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + WriteDescriptorSetAccelerationStructureKHR & operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( WriteDescriptorSetAccelerationStructureKHR ) ); + return *this; + } + + WriteDescriptorSetAccelerationStructureKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT - { - dstSet = dstSet_; - return 
*this; - } - - WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT - { - dstBinding = dstBinding_; - return *this; - } - - WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT - { - dstArrayElement = dstArrayElement_; - return *this; - } - - WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT - { - descriptorCount = descriptorCount_; - return *this; - } - - WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT - { - descriptorType = descriptorType_; - return *this; - } - - WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT - { - pImageInfo = pImageInfo_; - return *this; - } - - WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT - { - pBufferInfo = pBufferInfo_; - return *this; - } - - WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT - { - pTexelBufferView = pTexelBufferView_; - return *this; - } - - operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( dstSet == rhs.dstSet ) - && ( dstBinding == rhs.dstBinding ) - && ( dstArrayElement == rhs.dstArrayElement ) - && ( descriptorCount == rhs.descriptorCount ) - && ( descriptorType == rhs.descriptorType ) - && ( pImageInfo == rhs.pImageInfo ) - && ( pBufferInfo == rhs.pBufferInfo ) - && ( pTexelBufferView == rhs.pTexelBufferView ); - } - - bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; - uint32_t dstBinding = {}; - uint32_t dstArrayElement = {}; - uint32_t descriptorCount = {}; - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {}; - const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {}; - const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {}; - }; - static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct WriteDescriptorSetAccelerationStructureNV - { - VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = {}, - const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT - : accelerationStructureCount( accelerationStructureCount_ ) - , pAccelerationStructures( pAccelerationStructures_ ) - {} - - VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV & operator=( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV ) - offsetof( WriteDescriptorSetAccelerationStructureNV, pNext ) ); - return *this; - } - - WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - WriteDescriptorSetAccelerationStructureNV& operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - WriteDescriptorSetAccelerationStructureNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - WriteDescriptorSetAccelerationStructureNV & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT { accelerationStructureCount = accelerationStructureCount_; return *this; } - WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT { pAccelerationStructures = pAccelerationStructures_; return *this; } - operator VkWriteDescriptorSetAccelerationStructureNV const&() const VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + accelerationStructureCount = static_cast( accelerationStructures_.size() ); + pAccelerationStructures = accelerationStructures_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + + operator VkWriteDescriptorSetAccelerationStructureKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } - operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT + operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - bool operator==( WriteDescriptorSetAccelerationStructureNV const& rhs ) const VULKAN_HPP_NOEXCEPT + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const& ) const = default; +#else + bool operator==( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) @@ -60898,42 +79065,65 @@ namespace VULKAN_HPP_NAMESPACE && ( pAccelerationStructures == rhs.pAccelerationStructures ); } - bool operator!=( WriteDescriptorSetAccelerationStructureNV 
const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV; + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR; const void* pNext = {}; uint32_t accelerationStructureCount = {}; - const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures = {}; + }; - static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = WriteDescriptorSetAccelerationStructureKHR; + }; + using WriteDescriptorSetAccelerationStructureNV = WriteDescriptorSetAccelerationStructureKHR; struct WriteDescriptorSetInlineUniformBlockEXT { - VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = {}, - const void* pData_ = {} ) VULKAN_HPP_NOEXCEPT - : dataSize( dataSize_ ) - , pData( pData_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT(uint32_t dataSize_ = {}, const void* pData_ = {}) VULKAN_HPP_NOEXCEPT + : dataSize( dataSize_ ), pData( pData_ ) {} - VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT & operator=( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT ) - offsetof( WriteDescriptorSetInlineUniformBlockEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } - WriteDescriptorSetInlineUniformBlockEXT& operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + WriteDescriptorSetInlineUniformBlockEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) + : dataSize( static_cast( data_.size() * sizeof(T) ) ), pData( data_.data() ) + {} +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + WriteDescriptorSetInlineUniformBlockEXT & operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + WriteDescriptorSetInlineUniformBlockEXT & operator=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), 
&rhs, sizeof( WriteDescriptorSetInlineUniformBlockEXT ) ); return *this; } @@ -60955,6 +79145,17 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + WriteDescriptorSetInlineUniformBlockEXT & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = static_cast( data_.size() * sizeof(T) ); + pData = data_.data(); + return *this; + } +#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + + operator VkWriteDescriptorSetInlineUniformBlockEXT const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -60965,6 +79166,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( WriteDescriptorSetInlineUniformBlockEXT const& ) const = default; +#else bool operator==( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) @@ -60977,42 +79182,54 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT; const void* pNext = {}; uint32_t dataSize = {}; const void* pData = {}; + }; static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#ifdef VK_USE_PLATFORM_XCB_KHR + template <> + struct CppType + { + using Type = WriteDescriptorSetInlineUniformBlockEXT; + }; +#ifdef VK_USE_PLATFORM_XCB_KHR struct XcbSurfaceCreateInfoKHR { - VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, - xcb_connection_t* connection_ = {}, - xcb_window_t window_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , connection( connection_ ) - , window( window_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, xcb_connection_t* connection_ = {}, xcb_window_t window_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), connection( connection_ ), window( window_ ) {} - VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR ) - offsetof( XcbSurfaceCreateInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( XcbSurfaceCreateInfoKHR ) ); return *this; } @@ -61040,6 +79257,7 @@ 
namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkXcbSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -61050,19 +79268,26 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( XcbSurfaceCreateInfoKHR const& ) const = default; +#else bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( connection == rhs.connection ) - && ( window == rhs.window ); + && ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 ); } bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR; @@ -61070,37 +79295,46 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {}; xcb_connection_t* connection = {}; xcb_window_t window = {}; + }; static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = XcbSurfaceCreateInfoKHR; + }; #endif /*VK_USE_PLATFORM_XCB_KHR*/ #ifdef VK_USE_PLATFORM_XLIB_KHR - struct XlibSurfaceCreateInfoKHR { - VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, - Display* dpy_ = {}, - Window window_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , dpy( dpy_ ) - , window( window_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, Display* dpy_ = {}, Window window_ = {}) VULKAN_HPP_NOEXCEPT + : flags( flags_ ), dpy( dpy_ ), window( window_ ) {} - VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR ) - offsetof( XlibSurfaceCreateInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); + return *this; + } + + XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( XlibSurfaceCreateInfoKHR ) ); return *this; } @@ -61128,6 +79362,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } + operator VkXlibSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -61138,19 +79373,26 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } + +#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( XlibSurfaceCreateInfoKHR const& ) const = default; +#else bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dpy == rhs.dpy ) - && ( window == rhs.window ); + && ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 ); } bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif + + public: const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR; @@ -61158,19 +79400,668 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {}; Display* dpy = {}; Window window = {}; + }; static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = XlibSurfaceCreateInfoKHR; + }; #endif /*VK_USE_PLATFORM_XLIB_KHR*/ + class DebugReportCallbackEXT + { + public: + using CType = VkDebugReportCallbackEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + + public: + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT + : m_debugReportCallbackEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_debugReportCallbackEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT + : m_debugReportCallbackEXT( debugReportCallbackEXT ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) VULKAN_HPP_NOEXCEPT + { + m_debugReportCallbackEXT = debugReportCallbackEXT; + return *this; + } +#endif + + DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_debugReportCallbackEXT = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DebugReportCallbackEXT const& ) const = default; +#else + bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT; + } + + bool operator!=(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT; + } + + bool operator<(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT == VK_NULL_HANDLE; + } + + private: + VkDebugReportCallbackEXT m_debugReportCallbackEXT; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and 
wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DebugUtilsMessengerEXT + { + public: + using CType = VkDebugUtilsMessengerEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT + : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT + : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) VULKAN_HPP_NOEXCEPT + { + m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; + return *this; + } +#endif + + DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_debugUtilsMessengerEXT = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DebugUtilsMessengerEXT const& ) const = default; +#else + bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT; + } + + bool operator!=(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT; + } + + bool operator<(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT == VK_NULL_HANDLE; + } + + private: + VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + }; + + + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + class Instance; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueDebugReportCallbackEXT = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueDebugUtilsMessengerEXT = UniqueHandle; + template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; + using UniqueSurfaceKHR = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class Instance + { + public: + using CType = VkInstance; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance; + + public: + VULKAN_HPP_CONSTEXPR Instance() VULKAN_HPP_NOEXCEPT + : m_instance(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_instance(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT + : m_instance( instance ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Instance & operator=(VkInstance instance) VULKAN_HPP_NOEXCEPT + { + m_instance = instance; + return *this; + } +#endif + + Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_instance = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Instance const& ) const = default; +#else + bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance == rhs.m_instance; + } + + bool operator!=(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance != rhs.m_instance; + } + + bool operator<(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance < rhs.m_instance; + } +#endif + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template + VULKAN_HPP_NODISCARD Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + template + VULKAN_HPP_NODISCARD Result createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + template + VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type 
createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_IOS_MVK + template + VULKAN_HPP_NODISCARD Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_FUCHSIA + template + VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + template + VULKAN_HPP_NODISCARD Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + +#ifdef VK_USE_PLATFORM_METAL_EXT + template + VULKAN_HPP_NODISCARD Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#ifdef VK_USE_PLATFORM_GGP + template + VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef 
VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_GGP*/ + +#ifdef VK_USE_PLATFORM_VI_NN + template + VULKAN_HPP_NODISCARD Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + template + VULKAN_HPP_NODISCARD Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_NODISCARD Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type 
createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + template + VULKAN_HPP_NODISCARD Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + template + VULKAN_HPP_NODISCARD Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroups(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDevices(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void submitDebugUtilsMessageEXT( 
VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkInstance m_instance;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eInstance>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Instance;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eInstance>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Instance;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Instance;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Instance>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch> class UniqueHandleTraits<Instance, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
+  using UniqueInstance = UniqueHandle<Instance, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<Instance, Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+  template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type = 0>
+  VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount,
VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); + template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type enumerateInstanceVersion(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template - VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d) VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateInstance( reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pInstance ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) { VULKAN_HPP_NAMESPACE::Instance instance; Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); @@ -61178,7 +80069,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) { VULKAN_HPP_NAMESPACE::Instance instance; Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); @@ -61190,13 +80081,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT { 
return static_cast( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Dispatch const &d ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Dispatch const &d ) { std::vector properties; uint32_t propertyCount; @@ -61217,8 +80108,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) { std::vector properties( vectorAllocator ); uint32_t propertyCount; @@ -61242,13 +80133,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT { return static_cast( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d ) { std::vector properties; uint32_t propertyCount; @@ -61269,8 +80160,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) { std::vector properties( vectorAllocator ); uint32_t propertyCount; @@ -61309,13 +80200,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::begin( const 
CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const { Result result = static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" ); @@ -61422,7 +80313,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d ) const + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy const &counterBuffers, ArrayProxy const &counterBufferOffsets, Dispatch const &d ) const { #ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() ); @@ -61443,7 +80334,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy const &descriptorSets, ArrayProxy const &dynamicOffsets, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSets.size() , reinterpret_cast( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() ); } @@ -61477,6 +80368,20 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -61498,7 +80403,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, ArrayProxy sizes, Dispatch const &d ) const + VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy const &buffers, ArrayProxy const &offsets, ArrayProxy const &sizes, Dispatch const &d ) const { #ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); @@ 
-61535,7 +80440,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d ) const + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy const &buffers, ArrayProxy const &offsets, Dispatch const &d ) const { #ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); @@ -61549,6 +80454,67 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ), reinterpret_cast( pSizes ), reinterpret_cast( pStrides ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy const &buffers, ArrayProxy const &offsets, ArrayProxy const &sizes, ArrayProxy const &strides, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +#else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == sizes.size() ); +#else + if ( buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == strides.size() ); +#else + if ( buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( offsets.size() == sizes.size() ); +#else + if ( offsets.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: offsets.size() != sizes.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( offsets.size() == strides.size() ); +#else + if ( offsets.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: offsets.size() != strides.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( sizes.size() == strides.size() ); +#else + if ( sizes.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: sizes.size() != strides.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ), reinterpret_cast( sizes.data() ), reinterpret_cast( strides.data() ) ); + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -61556,22 +80522,73 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ), static_cast( filter ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( pBlitImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( &info ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + 
+#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( indirectBuffer ), static_cast( indirectOffset ), indirectStride ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast( &info ), static_cast( indirectBuffer ), static_cast( indirectOffset ), indirectStride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infoCount, reinterpret_cast( pInfos ), reinterpret_cast( ppOffsetInfos ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( ArrayProxy const &infos, ArrayProxy const &pOffsetInfos, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pOffsetInfos.size() ); +#else + if ( infos.size() != pOffsetInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infos.size() , reinterpret_cast( infos.data() ), reinterpret_cast( pOffsetInfos.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR 
dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( &info ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -61582,7 +80599,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy const &attachments, ArrayProxy const &rects, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast( attachments.data() ), rects.size() , reinterpret_cast( rects.data() ) ); } @@ -61595,7 +80612,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy const &ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &color ), ranges.size() , reinterpret_cast( ranges.data() ) ); } @@ -61608,26 +80625,56 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy const &ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &depthStencil ), ranges.size() , reinterpret_cast( ranges.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( 
VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); } #else template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -61635,12 +80682,25 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy const ®ions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast( 
pCopyBufferInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast( ©BufferInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -61648,12 +80708,25 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast( pCopyBufferToImageInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast( ©BufferToImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -61661,12 +80734,25 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( 
const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast( pCopyImageInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast( ©ImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -61674,12 +80760,40 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy const ®ions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast( pCopyImageToBufferInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast( ©ImageToBufferInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -62089,7 +81203,7 @@ 
namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d ) const + VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy const &counterBuffers, ArrayProxy const &counterBufferOffsets, Dispatch const &d ) const { #ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() ); @@ -62110,12 +81224,25 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy const &commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast( commandBuffers.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( pGeneratedCommandsInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( &generatedCommandsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -62190,22 +81317,22 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy const &memoryBarriers, ArrayProxy const &bufferMemoryBarriers, ArrayProxy const &imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarriers.size() , reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast( imageMemoryBarriers.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const 
VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast( pProcessCommandsInfo ) ); + d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast( &processCommandsInfo ) ); + d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -62216,7 +81343,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy values, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy const &values, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast( values.data() ) ); } @@ -62229,7 +81356,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy descriptorWrites, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy const &descriptorWrites, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, descriptorWrites.size() , reinterpret_cast( descriptorWrites.data() ) ); } @@ -62249,19 +81376,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast( pReserveSpaceInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast( &reserveSpaceInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void 
CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -62297,12 +81411,25 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( pResolveImageInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -62338,12 +81465,26 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy customSampleOrders, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy const &customSampleOrders, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast( sampleOrderType ), customSampleOrders.size() , reinterpret_cast( customSampleOrders.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast( cullMode ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast( cullMode ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -62372,6 +81513,62 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast( depthBoundsTestEnable ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast( depthBoundsTestEnable ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast( depthCompareOp ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast( depthCompareOp ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast( depthTestEnable ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast( depthTestEnable ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast( depthWriteEnable ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast( depthWriteEnable ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -62407,7 +81604,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy discardRectangles, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy const &discardRectangles, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast( discardRectangles.data() ) ); } @@ -62434,12 +81631,26 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy exclusiveScissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE 
void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy const &exclusiveScissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size() , reinterpret_cast( exclusiveScissors.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast( frontFace ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast( frontFace ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -62469,13 +81680,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const { Result result = static_cast( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceMarkerINTEL" ); @@ -62483,13 +81694,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast( pOverrideInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const { Result result = static_cast( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast( 
&overrideInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceOverrideINTEL" ); @@ -62497,19 +81708,33 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const { Result result = static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceStreamMarkerINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast( primitiveTopology ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast( primitiveTopology ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -62530,12 +81755,25 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy const &scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast( scissors.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast( pScissors ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( ArrayProxy const &scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size() , reinterpret_cast( scissors.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef 
VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -62550,6 +81788,20 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetStencilOpEXT( m_commandBuffer, static_cast( faceMask ), static_cast( failOp ), static_cast( passOp ), static_cast( depthFailOp ), static_cast( compareOp ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetStencilOpEXT( m_commandBuffer, static_cast( faceMask ), static_cast( failOp ), static_cast( passOp ), static_cast( depthFailOp ), static_cast( compareOp ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -62564,6 +81816,20 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast( stencilTestEnable ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast( stencilTestEnable ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -62585,7 +81851,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy const &viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast( viewports.data() ) ); } @@ -62598,7 +81864,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy shadingRatePalettes, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy const &shadingRatePalettes, Dispatch const 
&d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size() , reinterpret_cast( shadingRatePalettes.data() ) ); } @@ -62611,12 +81877,55 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy viewportWScalings, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy const &viewportWScalings, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast( viewportWScalings.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast( pViewports ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( ArrayProxy const &viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size() , reinterpret_cast( viewports.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast( pRaygenShaderBindingTable ), reinterpret_cast( pMissShaderBindingTable ), reinterpret_cast( pHitShaderBindingTable ), reinterpret_cast( pCallableShaderBindingTable ), static_cast( buffer ), static_cast( offset ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast( &raygenShaderBindingTable ), reinterpret_cast( &missShaderBindingTable ), reinterpret_cast( &hitShaderBindingTable ), reinterpret_cast( &callableShaderBindingTable ), static_cast( buffer ), static_cast( offset ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* 
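The setCullModeEXT, setFrontFaceEXT, setPrimitiveTopologyEXT, setDepth*EXT, setStencil*EXT, setViewportWithCountEXT and setScissorWithCountEXT additions in the hunks above come from VK_EXT_extended_dynamic_state. A sketch of recording the new dynamic state, assuming the bound pipeline declares the matching dynamic states and the extension is enabled:

    #include <vulkan/vulkan.hpp>

    void recordDynamicState( vk::CommandBuffer cmd, vk::Viewport const & viewport, vk::Rect2D const & scissor )
    {
      cmd.setCullModeEXT( vk::CullModeFlagBits::eBack );
      cmd.setFrontFaceEXT( vk::FrontFace::eCounterClockwise );
      cmd.setPrimitiveTopologyEXT( vk::PrimitiveTopology::eTriangleList );
      cmd.setDepthTestEnableEXT( VK_TRUE );
      cmd.setDepthWriteEnableEXT( VK_TRUE );
      cmd.setDepthCompareOpEXT( vk::CompareOp::eLessOrEqual );
      cmd.setViewportWithCountEXT( viewport );  // viewport count comes from the ArrayProxy size
      cmd.setScissorWithCountEXT( scissor );
    }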
pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast( pRaygenShaderBindingTable ), reinterpret_cast( pMissShaderBindingTable ), reinterpret_cast( pHitShaderBindingTable ), reinterpret_cast( pCallableShaderBindingTable ), width, height, depth ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast( &raygenShaderBindingTable ), reinterpret_cast( &missShaderBindingTable ), reinterpret_cast( &hitShaderBindingTable ), reinterpret_cast( &callableShaderBindingTable ), width, height, depth ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -62638,7 +81947,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy const &data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ); } @@ -62651,22 +81960,37 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy const &events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy const &memoryBarriers, ArrayProxy const &bufferMemoryBarriers, ArrayProxy const &imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast( events.data() ), static_cast( 
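traceRaysKHR and traceRaysIndirectKHR above belong to the provisional KHR ray tracing extension and are only compiled when VK_ENABLE_BETA_EXTENSIONS is defined. A sketch of the enhanced overload only, assuming the four shader binding table regions were built elsewhere:

    #include <vulkan/vulkan.hpp>

    #ifdef VK_ENABLE_BETA_EXTENSIONS
    void dispatchRays( vk::CommandBuffer cmd,
                       vk::StridedBufferRegionKHR const & raygen,
                       vk::StridedBufferRegionKHR const & miss,
                       vk::StridedBufferRegionKHR const & hit,
                       vk::StridedBufferRegionKHR const & callable,
                       vk::Extent2D extent )
    {
      cmd.traceRaysKHR( raygen, miss, hit, callable, extent.width, extent.height, 1 );
    }
    #endif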
srcStageMask ), static_cast( dstStageMask ), memoryBarriers.size() , reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast( imageMemoryBarriers.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( ArrayProxy const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteAccelerationStructuresPropertiesNV( 
m_commandBuffer, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -62700,13 +82024,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); } #else template - VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::end(Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::end(Dispatch const &d ) const { Result result = static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" ); @@ -62731,13 +82055,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); } #else template - VULKAN_HPP_INLINE typename ResultValueType::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const { Result result = static_cast( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireFullScreenExclusiveModeEXT" ); @@ -62746,13 +82070,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ template - VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( pAcquireInfo ), pImageIndex ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE ResultValue Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const { uint32_t imageIndex; Result result = static_cast( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( &acquireInfo ), &imageIndex ) ); @@ -62761,13 +82085,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore 
semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkAcquireNextImageKHR( m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), pImageIndex ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE ResultValue Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const { uint32_t imageIndex; Result result = static_cast( d.vkAcquireNextImageKHR( m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), &imageIndex ) ); @@ -62776,13 +82100,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast( pAcquireInfo ), reinterpret_cast( pConfiguration ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; Result result = static_cast( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast( &acquireInfo ), reinterpret_cast( &configuration ) ) ); @@ -62791,13 +82115,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d ) const + 
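Most Result-returning functions in these hunks gain VULKAN_HPP_NODISCARD (or VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS), so silently dropping a vk::Result now produces a warning. For acquireNextImageKHR the enhanced overload returns a ResultValue<uint32_t> whose success code still has to be inspected; a sketch, assuming exceptions are enabled and the default dispatcher is used:

    #include <vulkan/vulkan.hpp>

    uint32_t acquireImage( vk::Device device, vk::SwapchainKHR swapchain, vk::Semaphore imageAvailable )
    {
      vk::ResultValue<uint32_t> rv =
          device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, nullptr );
      if ( rv.result == vk::Result::eSuboptimalKHR )
      {
        // still a success code: the swapchain works but should be recreated soon
      }
      return rv.value;
    }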
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d ) const { Result result = static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( &info ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireProfilingLockKHR" ); @@ -62805,137 +82129,133 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pCommandBuffers ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const { std::vector commandBuffers( allocateInfo.commandBufferCount ); Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector commandBuffers( allocateInfo.commandBufferCount, vectorAllocator ); Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" ); } #ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const { - static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueHandle ), "CommandBuffer is greater than UniqueHandle!" 
); - std::vector, Allocator> commandBuffers; - commandBuffers.reserve( allocateInfo.commandBufferCount ); - CommandBuffer* buffer = reinterpret_cast( reinterpret_cast( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueHandle ) - sizeof( CommandBuffer ) ) ); - Result result = static_cast(d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); + std::vector, Allocator> uniqueCommandBuffers; + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast(commandBuffers.data()) ) ); if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) { + uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); PoolFree deleter( *this, allocateInfo.commandPool, d ); for ( size_t i=0 ; i( buffer[i], deleter ) ); + uniqueCommandBuffers.push_back( UniqueHandle( commandBuffers[i], deleter ) ); } } - return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + return createResultValue( result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template>::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const { - static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueHandle ), "CommandBuffer is greater than UniqueHandle!" 
); - std::vector, Allocator> commandBuffers( vectorAllocator ); - commandBuffers.reserve( allocateInfo.commandBufferCount ); - CommandBuffer* buffer = reinterpret_cast( reinterpret_cast( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueHandle ) - sizeof( CommandBuffer ) ) ); - Result result = static_cast(d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); + std::vector, Allocator> uniqueCommandBuffers( vectorAllocator ); + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast(commandBuffers.data()) ) ); if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) { + uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); PoolFree deleter( *this, allocateInfo.commandPool, d ); for ( size_t i=0 ; i( buffer[i], deleter ) ); + uniqueCommandBuffers.push_back( UniqueHandle( commandBuffers[i], deleter ) ); } } - return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + return createResultValue( result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); } #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pDescriptorSets ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const { std::vector descriptorSets( allocateInfo.descriptorSetCount ); Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector descriptorSets( allocateInfo.descriptorSetCount, vectorAllocator ); Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); return createResultValue( result, descriptorSets, 
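The allocateCommandBuffersUnique rework above drops the old trick of constructing the handles in place inside the vector of UniqueHandles via reinterpret_cast; the handles are now allocated into a plain std::vector<CommandBuffer> and wrapped afterwards. The calling convention is unchanged; a sketch with an assumed command pool:

    #include <vulkan/vulkan.hpp>
    #include <vector>

    std::vector<vk::UniqueCommandBuffer> makeCommandBuffers( vk::Device device, vk::CommandPool pool, uint32_t count )
    {
      vk::CommandBufferAllocateInfo allocateInfo( pool, vk::CommandBufferLevel::ePrimary, count );
      return device.allocateCommandBuffersUnique( allocateInfo );  // freed back to `pool` on destruction
    }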
VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" ); } #ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const { - static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueHandle ), "DescriptorSet is greater than UniqueHandle!" ); - std::vector, Allocator> descriptorSets; - descriptorSets.reserve( allocateInfo.descriptorSetCount ); - DescriptorSet* buffer = reinterpret_cast( reinterpret_cast( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueHandle ) - sizeof( DescriptorSet ) ) ); - Result result = static_cast(d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); + std::vector, Allocator> uniqueDescriptorSets; + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast(descriptorSets.data()) ) ); if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) { + uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); PoolFree deleter( *this, allocateInfo.descriptorPool, d ); for ( size_t i=0 ; i( buffer[i], deleter ) ); + uniqueDescriptorSets.push_back( UniqueHandle( descriptorSets[i], deleter ) ); } } - return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + return createResultValue( result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template>::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const { - static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueHandle ), "DescriptorSet is greater than UniqueHandle!" 
); - std::vector, Allocator> descriptorSets( vectorAllocator ); - descriptorSets.reserve( allocateInfo.descriptorSetCount ); - DescriptorSet* buffer = reinterpret_cast( reinterpret_cast( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueHandle ) - sizeof( DescriptorSet ) ) ); - Result result = static_cast(d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); + std::vector, Allocator> uniqueDescriptorSets( vectorAllocator ); + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast(descriptorSets.data()) ) ); if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) { + uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); PoolFree deleter( *this, allocateInfo.descriptorPool, d ); for ( size_t i=0 ; i( buffer[i], deleter ) ); + uniqueDescriptorSets.push_back( UniqueHandle( descriptorSets[i], deleter ) ); } } - return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + return createResultValue( result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); } #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMemory ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DeviceMemory memory; Result result = static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &memory ) ) ); @@ -62943,7 +82263,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DeviceMemory memory; Result result = static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &memory ) ) ); @@ -62954,29 +82274,45 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif 
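allocateDescriptorSetsUnique gets the same staging-vector treatment above, and allocateMemory / allocateMemoryUnique become nodiscard when exceptions are disabled while keeping their signatures. A sketch of the unchanged calling convention; selecting memoryTypeIndex from the memory requirements is assumed to happen elsewhere:

    #include <vulkan/vulkan.hpp>

    vk::UniqueDeviceMemory allocate( vk::Device device, vk::DeviceSize size, uint32_t memoryTypeIndex )
    {
      vk::MemoryAllocateInfo allocateInfo( size, memoryTypeIndex );
      return device.allocateMemoryUnique( allocateInfo );  // vkFreeMemory on destruction
    }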
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS template - VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryKHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + return static_cast( d.vkBindAccelerationStructureMemoryKHR( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::bindAccelerationStructureMemoryNV( ArrayProxy bindInfos, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindAccelerationStructureMemoryKHR( ArrayProxy const &bindInfos, Dispatch const &d ) const { - Result result = static_cast( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); + Result result = static_cast( d.vkBindAccelerationStructureMemoryKHR( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindAccelerationStructureMemoryNV( ArrayProxy const &bindInfos, Dispatch const &d ) const + { + Result result = static_cast( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); } #else template - VULKAN_HPP_INLINE typename ResultValueType::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename 
ResultValueType::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const { Result result = static_cast( d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory" ); @@ -62984,13 +82320,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::bindBufferMemory2( ArrayProxy bindInfos, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindBufferMemory2( ArrayProxy const &bindInfos, Dispatch const &d ) const { Result result = static_cast( d.vkBindBufferMemory2( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2" ); @@ -62998,13 +82334,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::bindBufferMemory2KHR( ArrayProxy bindInfos, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindBufferMemory2KHR( ArrayProxy const &bindInfos, Dispatch const &d ) const { Result result = static_cast( d.vkBindBufferMemory2KHR( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2KHR" ); @@ -63013,13 +82349,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); } #else template - VULKAN_HPP_INLINE 
typename ResultValueType::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const { Result result = static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory" ); @@ -63027,13 +82363,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::bindImageMemory2( ArrayProxy bindInfos, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindImageMemory2( ArrayProxy const &bindInfos, Dispatch const &d ) const { Result result = static_cast( d.vkBindImageMemory2( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2" ); @@ -63041,34 +82377,134 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::bindImageMemory2KHR( ArrayProxy bindInfos, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindImageMemory2KHR( ArrayProxy const &bindInfos, Dispatch const &d ) const { Result result = static_cast( d.vkBindImageMemory2KHR( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2KHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBuildAccelerationStructureKHR( m_device, 
infoCount, reinterpret_cast( pInfos ), reinterpret_cast( ppOffsetInfos ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructureKHR( ArrayProxy const &infos, ArrayProxy const &pOffsetInfos, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pOffsetInfos.size() ); +#else + if ( infos.size() != pOffsetInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + Result result = static_cast( d.vkBuildAccelerationStructureKHR( m_device, infos.size() , reinterpret_cast( infos.data() ), reinterpret_cast( pOffsetInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::buildAccelerationStructureKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); } #else template - VULKAN_HPP_INLINE typename ResultValueType::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d ) const { Result result = static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::compileDeferredNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCopyAccelerationStructureKHR( m_device, reinterpret_cast( pInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d ) const + { + Result result = static_cast( d.vkCopyAccelerationStructureKHR( m_device, reinterpret_cast( &info ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::copyAccelerationStructureKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCopyAccelerationStructureToMemoryKHR( m_device, reinterpret_cast( pInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 
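// Illustrative sketch only (assumed variable names, not part of this header): the enhanced-mode
// overload of Device::buildAccelerationStructureKHR added here is gated on VK_ENABLE_BETA_EXTENSIONS
// and pairs each AccelerationStructureBuildGeometryInfoKHR with exactly one
// AccelerationStructureBuildOffsetInfoKHR pointer; a size mismatch throws vk::LogicError, or only
// asserts when VULKAN_HPP_NO_EXCEPTIONS is defined. A call site would therefore look roughly like:
//
//     std::vector<vk::AccelerationStructureBuildGeometryInfoKHR>       infos   /* = ... */;
//     std::vector<const vk::AccelerationStructureBuildOffsetInfoKHR *> offsets /* = ... */;
//     // one offset-info pointer per build info, otherwise the wrapper rejects the call
//     vk::Result r = device.buildAccelerationStructureKHR( infos, offsets );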
+ template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d ) const + { + Result result = static_cast( d.vkCopyAccelerationStructureToMemoryKHR( m_device, reinterpret_cast( &info ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::copyAccelerationStructureToMemoryKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCopyMemoryToAccelerationStructureKHR( m_device, reinterpret_cast( pInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d ) const + { + Result result = static_cast( d.vkCopyMemoryToAccelerationStructureKHR( m_device, reinterpret_cast( &info ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::copyMemoryToAccelerationStructureKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructure, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pAccelerationStructure ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &accelerationStructure ) ) ); + return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &accelerationStructure ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, 
accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -63096,13 +82532,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pBuffer ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Buffer buffer; Result result = static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); @@ -63110,7 +82546,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Buffer buffer; Result result = static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); @@ -63122,13 +82558,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type 
Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::BufferView view; Result result = static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); @@ -63136,7 +82572,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::BufferView view; Result result = static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); @@ -63148,13 +82584,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCommandPool ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::CommandPool commandPool; Result result = static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); @@ -63162,7 +82598,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::CommandPool commandPool; Result result = static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); @@ -63174,91 +82610,117 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - 
VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Dispatch const &d ) const { std::vector pipelines( createInfos.size() ); Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector pipelines( createInfos.size(), vectorAllocator ); Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional 
allocator, Dispatch const &d ) const { Pipeline pipeline; Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipeline" ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipeline", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } #ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Dispatch const &d ) const { - static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" ); - std::vector, Allocator> pipelines; - pipelines.reserve( createInfos.size() ); - Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); - Result result = static_cast(d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + std::vector, Allocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { + uniquePipelines.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); for ( size_t i=0 ; i( buffer[i], deleter ) ); + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); } } - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" ); + return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const { - static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" 
); - std::vector, Allocator> pipelines( vectorAllocator ); - pipelines.reserve( createInfos.size() ); - Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); - Result result = static_cast(d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + std::vector, Allocator> uniquePipelines( vectorAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { + uniquePipelines.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); for ( size_t i=0 ; i( buffer[i], deleter ) ); + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); } } - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" ); + return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); } template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { Pipeline pipeline; Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelineUnique", deleter ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelineUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT }, deleter ); } #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS template - VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast( pAllocator ), reinterpret_cast( pDeferredOperation ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE 
typename ResultValueType::type Device::createDeferredOperationKHR( Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + Result result = static_cast( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &deferredOperation ) ) ); + return createResultValue( result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING"::Device::createDeferredOperationKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDeferredOperationKHRUnique( Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + Result result = static_cast( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &deferredOperation ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING"::Device::createDeferredOperationKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorPool ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; Result result = static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); @@ -63266,7 +82728,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; Result result = static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); @@ -63278,13 +82740,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSetLayout ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; Result result = static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); @@ -63292,7 +82754,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; Result result = static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); @@ -63304,13 +82766,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( 
allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); @@ -63318,7 +82780,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); @@ -63330,13 +82792,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); @@ -63344,7 +82806,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); @@ -63356,13 +82818,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 
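// Illustrative sketch only (assumed names, not part of this header): the VULKAN_HPP_NODISCARD
// annotations added throughout these hunks make it a compiler warning to silently drop the
// vk::Result returned by the pointer-style overloads, so call sites are expected to check it:
//
//     vk::DescriptorUpdateTemplate descriptorUpdateTemplate;
//     vk::Result result = device.createDescriptorUpdateTemplateKHR( &createInfo, nullptr,
//                                                                   &descriptorUpdateTemplate );
//     if ( result != vk::Result::eSuccess ) { /* handle the error */ }
//
// The enhanced-mode overloads are unchanged in shape and still throw on failure (returning the
// handle directly), but are now marked VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS for builds with
// VULKAN_HPP_NO_EXCEPTIONS defined.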
template - VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateEvent( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pEvent ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createEvent( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createEvent( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Event event; Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); @@ -63370,7 +82832,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Event event; Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); @@ -63382,13 +82844,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateFence( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createFence( const FenceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createFence( const FenceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Fence fence; Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); @@ -63396,7 +82858,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional 
allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Fence fence; Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); @@ -63408,13 +82870,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFramebuffer ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); @@ -63422,7 +82884,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); @@ -63434,91 +82896,89 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateGraphicsPipelines( m_device, 
static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Dispatch const &d ) const { std::vector pipelines( createInfos.size() ); Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector pipelines( createInfos.size(), vectorAllocator ); Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { Pipeline pipeline; Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline" ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } #ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, 
Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Dispatch const &d ) const { - static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" ); - std::vector, Allocator> pipelines; - pipelines.reserve( createInfos.size() ); - Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); - Result result = static_cast(d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + std::vector, Allocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { + uniquePipelines.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); for ( size_t i=0 ; i( buffer[i], deleter ) ); + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); } } - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" ); + return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const { - static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" 
); - std::vector, Allocator> pipelines( vectorAllocator ); - pipelines.reserve( createInfos.size() ); - Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); - Result result = static_cast(d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + std::vector, Allocator> uniquePipelines( vectorAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { + uniquePipelines.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); for ( size_t i=0 ; i( buffer[i], deleter ) ); + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); } } - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" ); + return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); } template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { Pipeline pipeline; Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", deleter ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT }, deleter ); } #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateImage( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pImage ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename 
ResultValueType::type Device::createImage( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createImage( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Image image; Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); @@ -63526,7 +82986,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Image image; Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); @@ -63538,13 +82998,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateImageView( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::ImageView view; Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); @@ -63552,7 +83012,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::ImageView view; Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); @@ -63564,65 +83024,39 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX* 
pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pIndirectCommandsLayout ) ) ); + return static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pIndirectCommandsLayout ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const { - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout; - Result result = static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); - return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVX" ); + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); + return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNV" ); } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const { - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout; - Result result = static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVXUnique", deleter ); + return createResultValue( result, indirectCommandsLayout, 
VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVUnique", deleter ); } #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ObjectTableNVX* pObjectTable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pObjectTable ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable; - Result result = static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &objectTable ) ) ); - return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVX" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable; - Result result = static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &objectTable ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVXUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineCache ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); @@ -63630,7 +83064,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type 
Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); @@ -63642,13 +83076,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineLayout ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); @@ -63656,7 +83090,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); @@ -63668,13 +83102,39 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT* pPrivateDataSlot, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return 
static_cast( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPrivateDataSlot ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot; + Result result = static_cast( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &privateDataSlot ) ) ); + return createResultValue( result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING"::Device::createPrivateDataSlotEXT" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot; + Result result = static_cast( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &privateDataSlot ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING"::Device::createPrivateDataSlotEXTUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pQueryPool ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::QueryPool queryPool; Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); @@ -63682,7 +83142,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::QueryPool queryPool; Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); @@ -63693,92 +83153,168 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS template - VULKAN_HPP_INLINE Result 
Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Dispatch const &d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); + } + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector pipelines( createInfos.size(), vectorAllocator ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); + } + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesKHRUnique( 
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Dispatch const &d ) const + { + std::vector, Allocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i=0 ; i( pipelines[i], deleter ) ); + } + } + + return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + template>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector, Allocator> uniquePipelines( vectorAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i=0 ; i( pipelines[i], deleter ) ); + } + } + + return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineKHRUnique", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT }, deleter ); + 
} +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Dispatch const &d ) const { std::vector pipelines( createInfos.size() ); Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector pipelines( createInfos.size(), vectorAllocator ); Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const { Pipeline pipeline; Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, 
static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNV" ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); } #ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Dispatch const &d ) const { - static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" ); - std::vector, Allocator> pipelines; - pipelines.reserve( createInfos.size() ); - Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); - Result result = static_cast(d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + std::vector, Allocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { + uniquePipelines.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); for ( size_t i=0 ; i( buffer[i], deleter ) ); + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); } } - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" ); + return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const { - static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" 
); - std::vector, Allocator> pipelines( vectorAllocator ); - pipelines.reserve( createInfos.size() ); - Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); - Result result = static_cast(d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + std::vector, Allocator> uniquePipelines( vectorAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { + uniquePipelines.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); for ( size_t i=0 ; i( buffer[i], deleter ) ); + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); } } - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" ); + return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); } template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const { Pipeline pipeline; Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNVUnique", deleter ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNVUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT }, deleter ); } #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); } 
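A minimal usage sketch of the new return convention, assuming an already initialized vk::Device device, a vk::PipelineCache pipelineCache, and filled vk::RayTracingPipelineCreateInfoNV structures in createInfos (these names are illustrative and do not appear in the patch): because ePipelineCompileRequiredEXT is now accepted as a success code, the reworked *Unique pipeline creators return a vk::ResultValue instead of the throwing ResultValueType, so the caller inspects .result before using .value.

    // sketch only: consuming the ResultValue returned by the reworked creator
    auto rv = device.createRayTracingPipelinesNVUnique( pipelineCache, createInfos );
    if ( rv.result == vk::Result::ePipelineCompileRequiredEXT )
    {
      // creation was only partially carried out; recompile or fall back here
    }
    auto uniquePipelines = std::move( rv.value );  // vector of unique pipeline handles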
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::RenderPass renderPass; Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); @@ -63786,7 +83322,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::RenderPass renderPass; Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); @@ -63798,13 +83334,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateRenderPass2( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::RenderPass renderPass; Result result = static_cast( d.vkCreateRenderPass2( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); @@ -63812,7 +83348,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::RenderPass renderPass; Result result = static_cast( d.vkCreateRenderPass2( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); @@ -63824,13 
+83360,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::RenderPass renderPass; Result result = static_cast( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); @@ -63838,7 +83374,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::RenderPass renderPass; Result result = static_cast( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); @@ -63850,13 +83386,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateSampler( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSampler ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Sampler sampler; Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), 
reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); @@ -63864,7 +83400,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Sampler sampler; Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); @@ -63876,13 +83412,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pYcbcrConversion ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; Result result = static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); @@ -63890,7 +83426,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; Result result = static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); @@ -63902,13 +83438,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, 
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pYcbcrConversion ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); @@ -63916,7 +83452,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); @@ -63928,13 +83464,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSemaphore ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Semaphore semaphore; Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( 
allocator ) ), reinterpret_cast( &semaphore ) ) ); @@ -63942,7 +83478,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Semaphore semaphore; Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); @@ -63954,13 +83490,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pShaderModule ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); @@ -63968,7 +83504,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); @@ -63980,73 +83516,71 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const 
VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pSwapchains ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHR( ArrayProxy const &createInfos, Optional allocator, Dispatch const &d ) const { std::vector swapchains( createInfos.size() ); Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHR( ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector swapchains( createInfos.size(), vectorAllocator ); Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" ); } template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { SwapchainKHR swapchain; Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHR" ); } #ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy const &createInfos, Optional allocator, Dispatch const &d ) const { - static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueHandle ), "SwapchainKHR is greater than UniqueHandle!" 
); - std::vector, Allocator> swapchainKHRs; - swapchainKHRs.reserve( createInfos.size() ); - SwapchainKHR* buffer = reinterpret_cast( reinterpret_cast( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( SwapchainKHR ) ) ); - Result result = static_cast(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + std::vector, Allocator> uniqueSwapchainKHRs; + std::vector swapchainKHRs( createInfos.size() ); + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(swapchainKHRs.data()) ) ); if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) { + uniqueSwapchainKHRs.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); for ( size_t i=0 ; i( buffer[i], deleter ) ); + uniqueSwapchainKHRs.push_back( UniqueHandle( swapchainKHRs[i], deleter ) ); } } - return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + return createResultValue( result, std::move( uniqueSwapchainKHRs ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + template>::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const { - static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueHandle ), "SwapchainKHR is greater than UniqueHandle!" 
); - std::vector, Allocator> swapchainKHRs( vectorAllocator ); - swapchainKHRs.reserve( createInfos.size() ); - SwapchainKHR* buffer = reinterpret_cast( reinterpret_cast( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( SwapchainKHR ) ) ); - Result result = static_cast(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + std::vector, Allocator> uniqueSwapchainKHRs( vectorAllocator ); + std::vector swapchainKHRs( createInfos.size() ); + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(swapchainKHRs.data()) ) ); if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) { + uniqueSwapchainKHRs.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); for ( size_t i=0 ; i( buffer[i], deleter ) ); + uniqueSwapchainKHRs.push_back( UniqueHandle( swapchainKHRs[i], deleter ) ); } } - return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + return createResultValue( result, std::move( uniqueSwapchainKHRs ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); } template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { SwapchainKHR swapchain; Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); @@ -64058,13 +83592,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSwapchain ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; Result result = static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); @@ -64072,7 +83606,7 @@ namespace VULKAN_HPP_NAMESPACE } 
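For the shared-swapchain path the accepted success set is unchanged (eSuccess only), so the enhanced-mode call still throws on failure; a short sketch, assuming a valid vk::Device device and filled vk::SwapchainCreateInfoKHR values info0 and info1 (hypothetical names, not taken from the patch):

    // sketch only: each returned element owns and destroys its VkSwapchainKHR
    std::vector<vk::SwapchainCreateInfoKHR> infos{ info0, info1 };
    auto uniqueSwapchains = device.createSharedSwapchainsKHRUnique( infos );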
 #ifndef VULKAN_HPP_NO_SMART_HANDLE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
   {
     VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
     Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
@@ -64110,13 +83644,13 @@ namespace VULKAN_HPP_NAMESPACE
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
   template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( pNameInfo ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
   {
     Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( &nameInfo ) ) );
     return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectNameEXT" );
@@ -64124,42 +83658,84 @@ namespace VULKAN_HPP_NAMESPACE
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
   template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( pTagInfo ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
   {
     Result result = static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( &tagInfo ) ) );
     return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectTagEXT" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR(
VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + return static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const + { + Result result = static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::deferredOperationJoinKHR", { Result::eSuccess, Result::eThreadDoneKHR, Result::eThreadIdleKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + d.vkDestroyAccelerationStructureKHR( 
m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); +#else + d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); +#else + d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -64241,6 +83817,36 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -64306,19 +83912,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate 
), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -64332,6 +83925,19 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -64476,54 +84082,28 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -64605,6 +84185,32 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif 
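  // Illustrative note, not part of the patch or the generated header: the
  // VK_NVX_device_generated_commands wrappers above are superseded by their
  // VK_NV_device_generated_commands equivalents, and the ObjectTableNVX handle is removed
  // entirely. A caller that used to destroy an IndirectCommandsLayoutNVX and an ObjectTableNVX
  // now only releases the NV layout; `device` and `layout` stand in for application-owned values.
  void exampleDestroyIndirectCommandsLayout( vk::Device device, vk::IndirectCommandsLayoutNV layout )
  {
    device.destroyIndirectCommandsLayoutNV( layout );
    // equivalently, via the generic overload added above: device.destroy( layout );
  }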
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -64696,19 +84302,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -64722,6 +84315,19 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -64828,13 +84434,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkDeviceWaitIdle( m_device ) ); } #else template - VULKAN_HPP_INLINE typename 
ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
   {
     Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
     return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitIdle" );
@@ -64856,13 +84462,13 @@ namespace VULKAN_HPP_NAMESPACE
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
   template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges, Dispatch const &d ) const
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> const &memoryRanges, Dispatch const &d ) const
   {
     Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
     return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::flushMappedMemoryRanges" );
@@ -64876,7 +84482,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const CommandBuffer> const &commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
   }
@@ -64889,7 +84495,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const CommandBuffer> const &commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
   }
@@ -64902,7 +84508,7 @@ namespace VULKAN_HPP_NAMESPACE
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d ) const
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> const &descriptorSets, Dispatch const &d ) const
   {
     Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
     return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::freeDescriptorSets" );
@@ -64916,7 +84522,7 @@ namespace VULKAN_HPP_NAMESPACE
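  // Illustrative usage sketch, not part of the patch or the generated header: the enhanced
  // overloads above now take their ArrayProxy parameters by const reference, but call sites do
  // not change - single objects, initializer lists and containers still convert implicitly.
  // `device`, `memory`, `commandPool` and `commandBuffers` stand in for application-owned values.
  void exampleFlushAndFree( vk::Device device, vk::DeviceMemory memory,
                            vk::CommandPool commandPool,
                            std::vector<vk::CommandBuffer> const & commandBuffers )
  {
    // a single MappedMemoryRange binds to ArrayProxy<const MappedMemoryRange> const &
    device.flushMappedMemoryRanges( vk::MappedMemoryRange( memory, 0, VK_WHOLE_SIZE ) );

    // a vector of command buffers binds the same way
    device.freeCommandBuffers( commandPool, commandBuffers );
  }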
} #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d ) const + VULKAN_HPP_INLINE typename ResultValueType::type Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy const &descriptorSets, Dispatch const &d ) const { Result result = static_cast( d.vkFreeDescriptorSets( m_device, static_cast( descriptorPool ), descriptorSets.size() , reinterpret_cast( descriptorSets.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::free" ); @@ -64949,19 +84555,83 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS template - VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), dataSize, pData ) ); + return static_cast( d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy data, Dispatch const &d ) const + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - Result result = static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureHandleNV" ); + return d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. 
Use one of the other flavours of it.") + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy const &data, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureHandleNV" ); + + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, Dispatch const &d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Dispatch const &d ) const + { + T data; + Result result = static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), 1 * sizeof( T ) , reinterpret_cast( &data ) ) ); + + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ template VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const 
VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -64988,20 +84658,20 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_ANDROID_KHR template - VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast( &properties ) ) ); return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" ); } template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const { StructureChain structureChain; VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID& properties = structureChain.template get(); @@ -65138,13 +84808,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getCalibratedTimestampsEXT( ArrayProxy timestampInfos, ArrayProxy timestamps, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getCalibratedTimestampsEXT( ArrayProxy const ×tampInfos, ArrayProxy const ×tamps, Dispatch const &d ) const { #ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() ); @@ -65160,6 +84830,39 @@ namespace 
VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast( operation ) ); + } +#else + template + VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast( operation ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifdef VK_ENABLE_BETA_EXTENSIONS +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getDeferredOperationResultKHR", { Result::eSuccess, Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -65206,6 +84909,22 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_INLINE Result Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast( version ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast( &version ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureCompatibilityKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -65237,13 +84956,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( pDeviceGroupPresentCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; Result result = static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); @@ -65253,13 +84972,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR template - VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pModes ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &modes ) ) ); @@ -65269,13 +84988,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ template - VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast( surface ), reinterpret_cast( pModes ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( 
m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( &modes ) ) );
@@ -65369,13 +85088,13 @@ namespace VULKAN_HPP_NAMESPACE
 
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
   }
 #else
   template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
   {
     Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
     return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
@@ -65383,13 +85102,13 @@ namespace VULKAN_HPP_NAMESPACE
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
   template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
   {
     int fd;
     Result result = static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( &getFdInfo ), &fd ) );
@@ -65399,13 +85118,13 @@ namespace VULKAN_HPP_NAMESPACE
 
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
   }
 #else
   template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
   {
     Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
     return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
@@ -65414,13 +85133,13 @@ namespace VULKAN_HPP_NAMESPACE
 
 #ifdef VK_USE_PLATFORM_WIN32_KHR
   template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>(
d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const { HANDLE handle; Result result = static_cast( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); @@ -65429,6 +85148,29 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template + VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -65511,7 +85253,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const { std::vector sparseMemoryRequirements; @@ -65521,7 +85263,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); return sparseMemoryRequirements; } - template + template::value, int>::type> VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector sparseMemoryRequirements( vectorAllocator ); @@ -65539,7 +85281,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( pInfo ), 
pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const { std::vector sparseMemoryRequirements; @@ -65549,7 +85291,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); return sparseMemoryRequirements; } - template + template::value, int>::type> VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector sparseMemoryRequirements( vectorAllocator ); @@ -65567,7 +85309,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const { std::vector sparseMemoryRequirements; @@ -65577,7 +85319,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); return sparseMemoryRequirements; } - template + template::value, int>::type> VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector sparseMemoryRequirements( vectorAllocator ); @@ -65604,6 +85346,21 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetImageViewAddressNVX( m_device, static_cast( imageView ), reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; + Result result = static_cast( d.vkGetImageViewAddressNVX( m_device, static_cast( imageView ), reinterpret_cast( &properties ) ) ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getImageViewAddressNVX" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -65619,13 +85376,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_ANDROID_KHR template - VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( pInfo ), pBuffer ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const { struct AHardwareBuffer* buffer; Result result = static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( &info ), &buffer ) ); @@ -65635,13 +85392,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ template - VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const { int fd; Result result = static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); @@ -65650,13 +85407,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( pMemoryFdProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; Result result = static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( &memoryFdProperties ) ) ); @@ -65665,13 +85422,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result 
Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast( handleType ), pHostPointer, reinterpret_cast( pMemoryHostPointerProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; Result result = static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast( handleType ), pHostPointer, reinterpret_cast( &memoryHostPointerProperties ) ) ); @@ -65681,13 +85438,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR template - VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const { HANDLE handle; Result result = static_cast( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); @@ -65698,13 +85455,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR template - VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), pHandle ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandleNV( 
VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const { HANDLE handle; Result result = static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), &handle ) ); @@ -65715,13 +85472,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR template - VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast( handleType ), handle, reinterpret_cast( pMemoryWin32HandleProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; Result result = static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast( handleType ), handle, reinterpret_cast( &memoryWin32HandleProperties ) ) ); @@ -65731,13 +85488,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ template - VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), pPresentationTimingCount, reinterpret_cast( pPresentationTimings ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const { std::vector presentationTimings; uint32_t presentationTimingCount; @@ -65758,8 +85515,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, presentationTimings, 
VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector presentationTimings( vectorAllocator ); uint32_t presentationTimingCount; @@ -65783,13 +85540,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPerformanceParameterINTEL( m_device, static_cast( parameter ), reinterpret_cast( pValue ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; Result result = static_cast( d.vkGetPerformanceParameterINTEL( m_device, static_cast( parameter ), reinterpret_cast( &value ) ) ); @@ -65798,13 +85555,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), pDataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d ) const { std::vector data; size_t dataSize; @@ -65825,8 +85582,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache 
pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector data( vectorAllocator ); size_t dataSize; @@ -65850,13 +85607,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast( pInternalRepresentations ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const { std::vector internalRepresentations; uint32_t internalRepresentationCount; @@ -65877,8 +85634,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector internalRepresentations( vectorAllocator ); uint32_t internalRepresentationCount; @@ -65902,13 +85659,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( pPipelineInfo ), pExecutableCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d ) const { std::vector properties; uint32_t executableCount; @@ -65929,8 +85686,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); uint32_t executableCount; @@ -65954,13 +85711,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( pExecutableInfo ), pStatisticCount, reinterpret_cast( pStatistics ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const { std::vector statistics; uint32_t statisticCount; @@ -65981,8 +85738,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector statistics( vectorAllocator ); uint32_t statisticCount; @@ -66006,41 +85763,183 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT 
privateDataSlot, uint64_t* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast( stride ), static_cast( flags ) ) ); + d.vkGetPrivateDataEXT( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const + template + VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ), static_cast( stride ), static_cast( flags ) ) ); + uint64_t data; + d.vkGetPrivateDataEXT( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), &data ); + return data; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast( stride ), static_cast( flags ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. 
Use one of the other flavours of it.") + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy const &data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ), static_cast( stride ), static_cast( flags ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); + + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ), static_cast( stride ), static_cast( flags ) ) ); + + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const + { + T data; + Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, 1 * sizeof( T ) , reinterpret_cast( &data ), static_cast( stride ), static_cast( flags ) ) ); + + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + return static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d ) const + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. 
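For orientation, a minimal usage sketch of the vector flavour of getQueryPoolResults added just above. This is illustrative only and not taken from the diff; device (a vk::Device) and queryPool (a vk::QueryPool) are assumed to exist in the calling code, and vk is the default VULKAN_HPP_NAMESPACE.

    // Read four 64-bit query results in one call; the result may be eNotReady,
    // so the value comes back wrapped in a ResultValue rather than thrown on.
    vk::ResultValue<std::vector<uint64_t>> timestamps =
        device.getQueryPoolResults<uint64_t>( queryPool,
                                              0,                       // firstQuery
                                              4,                       // queryCount
                                              4 * sizeof( uint64_t ),  // dataSize
                                              sizeof( uint64_t ),      // stride
                                              vk::QueryResultFlagBits::e64 );
    if ( timestamps.result == vk::Result::eSuccess )
    {
      // timestamps.value holds one uint64_t per query
    }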
Use one of the other flavours of it.") + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy const &data, Dispatch const &d ) const { - Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + Result result = static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d ) const + { + T data; + Result result = static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, 1 * sizeof( T ) , reinterpret_cast( &data ) ) ); + + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. 
Use one of the other flavours of it.") + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy const &data, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesKHR" ); + + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d ) const + { + T data; + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, 1 * sizeof( T ) , reinterpret_cast( &data ) ) ); + + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. 
Use one of the other flavours of it.") + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy const &data, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" ); + + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d ) const + { + T data; + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, 1 * sizeof( T ) , reinterpret_cast( &data ) ) ); + + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template - VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( pDisplayTimingProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( &displayTimingProperties ) ) ); @@ -66064,13 +85963,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
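A short sketch of the new vector flavour of getRayTracingShaderGroupHandlesKHR added above (guarded by VK_ENABLE_BETA_EXTENSIONS in this version). Illustrative only, not from the diff; device, rtPipeline, groupCount and handleSize are assumed, with handleSize normally taken from the ray-tracing pipeline properties.

    // In exception mode the enhanced overload returns the vector directly
    // and throws on error; one handle of handleSize bytes per shader group.
    std::vector<uint8_t> handles =
        device.getRayTracingShaderGroupHandlesKHR<uint8_t>( rtPipeline,
                                                            0,           // firstGroup
                                                            groupCount,
                                                            size_t( groupCount ) * handleSize ); // dataSize
    // handles is then copied into the shader binding table buffer.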
Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetSemaphoreCounterValue( m_device, static_cast( semaphore ), pValue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const { uint64_t value; Result result = static_cast( d.vkGetSemaphoreCounterValue( m_device, static_cast( semaphore ), &value ) ); @@ -66079,13 +85978,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast( semaphore ), pValue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const { uint64_t value; Result result = static_cast( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast( semaphore ), &value ) ); @@ -66094,13 +85993,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const { int fd; Result result = static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); @@ -66110,13 +86009,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR template - VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( 
pGetWin32HandleInfo ), pHandle ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const { HANDLE handle; Result result = static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); @@ -66126,13 +86025,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ template - VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), pInfoSize, pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d ) const { std::vector info; size_t infoSize; @@ -66153,8 +86052,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector info( vectorAllocator ); size_t infoSize; @@ -66178,13 +86077,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( 
d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const { uint64_t counterValue; Result result = static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), &counterValue ) ); @@ -66193,13 +86092,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), pSwapchainImageCount, reinterpret_cast( pSwapchainImages ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const { std::vector swapchainImages; uint32_t swapchainImageCount; @@ -66220,8 +86119,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector swapchainImages( vectorAllocator ); uint32_t swapchainImageCount; @@ -66246,13 +86145,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); } #else template - VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const { Result result = static_cast( d.vkGetSwapchainStatusKHR( m_device, 
static_cast( swapchain ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); @@ -66260,13 +86159,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), pDataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d ) const { std::vector data; size_t dataSize; @@ -66287,8 +86186,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector data( vectorAllocator ); size_t dataSize; @@ -66312,13 +86211,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const { Result result = static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" ); @@ -66327,13 +86226,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR template - VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const 
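A brief sketch of what the added VULKAN_HPP_NODISCARD means at a call site such as getSwapchainStatusKHR, which keeps returning a plain vk::Result because it has two success codes. Illustrative only, not from the diff; device and swapchain are assumed to exist.

    // Ignoring the returned Result now triggers a [[nodiscard]] warning.
    vk::Result status = device.getSwapchainStatusKHR( swapchain );
    if ( status == vk::Result::eSuboptimalKHR )
    {
      // e.g. schedule a swapchain recreation; error codes throw in
      // exception mode, so only the success codes reach this point.
    }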
VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const { Result result = static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" ); @@ -66342,13 +86241,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ template - VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const { Result result = static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" ); @@ -66357,13 +86256,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR template - VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const { Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" ); @@ -66372,13 +86271,13 @@ namespace 
VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ template - VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( pInitializeInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d ) const { Result result = static_cast( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( &initializeInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::initializePerformanceApiINTEL" ); @@ -66386,13 +86285,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::invalidateMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::invalidateMappedMemoryRanges( ArrayProxy const &memoryRanges, Dispatch const &d ) const { Result result = static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast( memoryRanges.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::invalidateMappedMemoryRanges" ); @@ -66400,13 +86299,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkMapMemory( m_device, static_cast( memory ), static_cast( offset ), static_cast( size ), static_cast( flags ), ppData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, 
VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const &d ) const { void* pData; Result result = static_cast( d.vkMapMemory( m_device, static_cast( memory ), static_cast( offset ), static_cast( size ), static_cast( flags ), &pData ) ); @@ -66415,13 +86314,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy srcCaches, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy const &srcCaches, Dispatch const &d ) const { Result result = static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCaches.size() , reinterpret_cast( srcCaches.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergePipelineCaches" ); @@ -66429,13 +86328,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkMergeValidationCachesEXT( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy srcCaches, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy const &srcCaches, Dispatch const &d ) const { Result result = static_cast( d.vkMergeValidationCachesEXT( m_device, static_cast( dstCache ), srcCaches.size() , reinterpret_cast( srcCaches.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergeValidationCachesEXT" ); @@ -66494,38 +86393,16 @@ namespace VULKAN_HPP_NAMESPACE #endif 
/*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkRegisterObjectsNVX( m_device, static_cast( objectTable ), objectCount, reinterpret_cast( ppObjectTableEntries ), pObjectIndices ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy pObjectTableEntries, ArrayProxy objectIndices, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( pObjectTableEntries.size() == objectIndices.size() ); -#else - if ( pObjectTableEntries.size() != objectIndices.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - Result result = static_cast( d.vkRegisterObjectsNVX( m_device, static_cast( objectTable ), pObjectTableEntries.size() , reinterpret_cast( pObjectTableEntries.data() ), objectIndices.data() ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::registerObjectsNVX" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - #ifdef VK_USE_PLATFORM_WIN32_KHR #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); } #else template - VULKAN_HPP_INLINE typename ResultValueType::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const { Result result = static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releaseFullScreenExclusiveModeEXT" ); @@ -66535,13 +86412,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); } #else template - VULKAN_HPP_INLINE typename ResultValueType::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type 
Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const { Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releasePerformanceConfigurationINTEL" ); @@ -66614,7 +86491,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::resetFences( ArrayProxy fences, Dispatch const &d ) const + VULKAN_HPP_INLINE typename ResultValueType::type Device::resetFences( ArrayProxy const &fences, Dispatch const &d ) const { Result result = static_cast( d.vkResetFences( m_device, fences.size() , reinterpret_cast( fences.data() ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetFences" ); @@ -66650,13 +86527,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const { Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" ); @@ -66664,13 +86541,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const { Result result = static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" ); @@ -66679,13 +86556,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + 
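The switch of these parameters to ArrayProxy<...> const & is call-site compatible; a minimal sketch, illustrative only and not from the diff, with device and fence assumed to exist:

    // Braced temporaries and containers still bind to the const-reference
    // ArrayProxy parameter, so existing call sites keep compiling unchanged.
    device.resetFences( { fence } );
    // The same applies to the other overloads touched in this hunk series,
    // e.g. invalidateMappedMemoryRanges, mergePipelineCaches, setHdrMetadataEXT.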
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); } #else template - VULKAN_HPP_INLINE typename ResultValueType::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const { Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" ); @@ -66699,7 +86576,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy swapchains, ArrayProxy metadata, Dispatch const &d ) const + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy const &swapchains, ArrayProxy const &metadata, Dispatch const &d ) const { #ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); @@ -66727,14 +86604,29 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkSetPrivateDataEXT( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), data ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d ) const + { + Result result = static_cast( d.vkSetPrivateDataEXT( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), data ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setPrivateDataEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( pSignalInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const { Result result = static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( &signalInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphore" ); @@ -66742,13 +86634,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( pSignalInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const { Result result = static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( &signalInfo ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphoreKHR" ); @@ -66811,28 +86703,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkUnregisterObjectsNVX( m_device, static_cast( objectTable ), objectCount, reinterpret_cast( pObjectEntryTypes ), pObjectIndices ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy objectEntryTypes, ArrayProxy objectIndices, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( objectEntryTypes.size() == objectIndices.size() ); -#else - if ( objectEntryTypes.size() != objectIndices.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - Result result = static_cast( d.vkUnregisterObjectsNVX( m_device, static_cast( objectTable ), objectEntryTypes.size() , reinterpret_cast( objectEntryTypes.data() ), objectIndices.data() ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::unregisterObjectsNVX" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -66868,20 +86738,20 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy descriptorWrites, ArrayProxy descriptorCopies, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy const &descriptorWrites, ArrayProxy const &descriptorCopies, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast( descriptorCopies.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy const &fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const { Result result = static_cast( d.vkWaitForFences( m_device, fences.size() , reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitForFences", { Result::eSuccess, Result::eTimeout } ); @@ -66889,13 +86759,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkWaitSemaphores( m_device, reinterpret_cast( pWaitInfo ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const { Result result = static_cast( d.vkWaitSemaphores( m_device, reinterpret_cast( &waitInfo ), timeout ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitSemaphores", { Result::eSuccess, Result::eTimeout } ); @@ -66903,28 +86773,44 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( pWaitInfo ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const { Result result = static_cast( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( &waitInfo ), timeout ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitSemaphoresKHR", { Result::eSuccess, Result::eTimeout } ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR( uint32_t 
accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), dataSize, pData, stride ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::writeAccelerationStructuresPropertiesKHR( ArrayProxy const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy const &data, size_t stride, Dispatch const &d ) const + { + Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ), stride ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::writeAccelerationStructuresPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VK_USE_PLATFORM_ANDROID_KHR template - VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -66932,7 +86818,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -66996,14 +86882,42 @@ namespace VULKAN_HPP_NAMESPACE #endif 
/*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT template - VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDirectFBSurfaceEXT" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDirectFBSurfaceEXTUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67011,7 +86925,7 @@ namespace 
VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67023,13 +86937,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67037,7 +86951,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67050,13 +86964,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_IOS_MVK template - VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67064,7 +86978,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67078,13 +86992,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_FUCHSIA template - VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67092,7 +87006,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator, Dispatch const 
&d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67106,13 +87020,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_MACOS_MVK template - VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67120,7 +87034,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67134,13 +87048,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_METAL_EXT template - VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) 
); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67148,7 +87062,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67162,13 +87076,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_GGP template - VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67176,7 +87090,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator, Dispatch const 
&d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67190,13 +87104,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_VI_NN template - VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67204,7 +87118,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67218,13 +87132,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WAYLAND_KHR template - VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS 
VULKAN_HPP_INLINE typename ResultValueType::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67232,7 +87146,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67246,13 +87160,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR template - VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67260,7 +87174,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67274,13 +87188,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_XCB_KHR template - VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const 
VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67288,7 +87202,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67302,13 +87216,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_XLIB_KHR template - VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67316,7 
+87230,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceKHR surface; Result result = static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); @@ -67441,13 +87355,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const { std::vector physicalDeviceGroupProperties; uint32_t physicalDeviceGroupCount; @@ -67468,8 +87382,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector physicalDeviceGroupProperties( vectorAllocator ); uint32_t physicalDeviceGroupCount; @@ -67493,13 +87407,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type 
Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const { std::vector physicalDeviceGroupProperties; uint32_t physicalDeviceGroupCount; @@ -67520,8 +87434,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector physicalDeviceGroupProperties( vectorAllocator ); uint32_t physicalDeviceGroupCount; @@ -67545,13 +87459,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const { std::vector physicalDevices; uint32_t physicalDeviceCount; @@ -67572,8 +87486,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector physicalDevices( vectorAllocator ); uint32_t physicalDeviceCount; @@ -67624,29 +87538,28 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT template - VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast( display ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const { - Display dpy; Result result = static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast( 
display ) ) ); - return createResultValue( result, dpy, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDevice ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Device device; Result result = static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); @@ -67654,7 +87567,7 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Device device; Result result = static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); @@ -67666,13 +87579,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast( display ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMode ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & 
createInfo, Optional allocator, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; Result result = static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast( display ), reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &mode ) ) ); @@ -67681,13 +87594,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Dispatch const &d ) const { std::vector properties; uint32_t propertyCount; @@ -67708,8 +87621,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); uint32_t propertyCount; @@ -67733,13 +87646,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const { std::vector properties; uint32_t propertyCount; @@ -67760,8 +87673,8 @@ namespace VULKAN_HPP_NAMESPACE } return 
createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); uint32_t propertyCount; @@ -67784,16 +87697,19 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast( pCounters ), reinterpret_cast( pCounterDescriptions ) ) ); + return static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast( pCounters ), reinterpret_cast( pCounterDescriptions ) ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy counters, Dispatch const &d ) const + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.") + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy const &counters, Dispatch const &d ) const { - std::vector counterDescriptions; + std::vector counterDescriptions; uint32_t counterCount; Result result; do @@ -67811,11 +87727,14 @@ namespace VULKAN_HPP_NAMESPACE counterDescriptions.resize( counterCount ); } return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy counters, Allocator const& vectorAllocator, Dispatch const &d ) const + + template ::value, int>::type> + VULKAN_HPP_DEPRECATED( "This function is deprecated. 
Use one of the other flavours of it.") + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy const &counters, Allocator const& vectorAllocator, Dispatch const &d ) const { - std::vector counterDescriptions( vectorAllocator ); + std::vector counterDescriptions( vectorAllocator ); uint32_t counterCount; Result result; do @@ -67833,17 +87752,68 @@ namespace VULKAN_HPP_NAMESPACE counterDescriptions.resize( counterCount ); } return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, std::vector>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const + { + std::pair, std::vector> enumeratedData; + uint32_t counterCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + enumeratedData.first.resize( counterCount ); + enumeratedData.second.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast( enumeratedData.first.data() ), reinterpret_cast( enumeratedData.second.data() ) ) ); + VULKAN_HPP_ASSERT( counterCount <= enumeratedData.first.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( counterCount < enumeratedData.first.size() ) ) + { + enumeratedData.first.resize( counterCount ); + enumeratedData.second.resize( counterCount ); + } + return createResultValue( result, enumeratedData, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } + + template ::value && std::is_same::value, int>::type > + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, std::vector>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & countersAllocator, PerformanceCounterDescriptionKHRAllocator & counterDescriptionsAllocator, Dispatch const & d ) const + { + std::pair, std::vector> enumeratedData( std::piecewise_construct, std::forward_as_tuple( countersAllocator ), std::forward_as_tuple( counterDescriptionsAllocator ) ); + uint32_t counterCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + enumeratedData.first.resize( counterCount ); + enumeratedData.second.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast( enumeratedData.first.data() ), reinterpret_cast( enumeratedData.second.data() ) ) ); + VULKAN_HPP_ASSERT( counterCount <= enumeratedData.first.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( counterCount < enumeratedData.first.size() ) ) + { + enumeratedData.first.resize( counterCount ); + enumeratedData.second.resize( counterCount ); + } + return 
createResultValue( result, enumeratedData, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const { std::vector properties; uint32_t propertyCount; @@ -67864,8 +87834,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); uint32_t propertyCount; @@ -67889,13 +87859,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const { std::vector properties; uint32_t propertyCount; @@ -67916,8 +87886,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type 
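// Usage sketch for the new pair-returning enumerateQueueFamilyPerformanceQueryCountersKHR
// overload added above. Assumes VK_KHR_performance_query is enabled, a dispatcher that has
// loaded the extension entry point, and exceptions not disabled; names are placeholders.
#include <iostream>
#include <vulkan/vulkan.hpp>

void listPerformanceCounters( vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex )
{
  // first holds the counters, second the matching descriptions, index for index
  auto counterData = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
  std::vector<vk::PerformanceCounterKHR> const &            counters     = counterData.first;
  std::vector<vk::PerformanceCounterDescriptionKHR> const & descriptions = counterData.second;
  std::cout << counters.size() << " counter(s), " << descriptions.size() << " description(s)\n";
}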
PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); uint32_t propertyCount; @@ -67941,13 +87911,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast( pDisplayPlaneInfo ), reinterpret_cast( pCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; Result result = static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast( &displayPlaneInfo ), reinterpret_cast( &capabilities ) ) ); @@ -67956,13 +87926,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( pCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; Result result = static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( &capabilities ) ) ); @@ -67971,13 +87941,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result 
PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast( pDisplays ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const { std::vector displays; uint32_t displayCount; @@ -67998,8 +87968,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector displays( vectorAllocator ); uint32_t displayCount; @@ -68023,13 +87993,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Dispatch const &d ) const { std::vector timeDomains; uint32_t timeDomainCount; @@ -68050,8 +88020,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector timeDomains( vectorAllocator ); uint32_t timeDomainCount; @@ -68075,13 +88045,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result 
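// Sketch of a direct-to-display query sequence built from the enhanced overloads in this
// region (VK_KHR_display). Placeholder names, exceptions not disabled; assumes plane 0
// supports at least one display with at least one mode.
#include <vulkan/vulkan.hpp>

vk::DisplayPlaneCapabilitiesKHR firstPlaneCapabilities( vk::PhysicalDevice physicalDevice )
{
  uint32_t const planeIndex = 0;  // arbitrary example plane
  std::vector<vk::DisplayKHR>               displays = physicalDevice.getDisplayPlaneSupportedDisplaysKHR( planeIndex );
  std::vector<vk::DisplayModePropertiesKHR> modes    = physicalDevice.getDisplayModePropertiesKHR( displays.front() );
  return physicalDevice.getDisplayPlaneCapabilitiesKHR( modes.front().displayMode, planeIndex );
}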
PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Dispatch const &d ) const { std::vector properties; uint32_t propertyCount; @@ -68102,8 +88072,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); uint32_t propertyCount; @@ -68126,14 +88096,29 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB* dfb, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Dispatch const &d ) const { std::vector properties; uint32_t propertyCount; @@ -68154,8 +88139,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, properties, 
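// Sketch for the newly added DirectFB presentation-support query (VK_EXT_directfb_surface).
// Only meaningful when built with VK_USE_PLATFORM_DIRECTFB_EXT and with a dispatcher that
// has loaded vkGetPhysicalDeviceDirectFBPresentationSupportEXT; all names are placeholders.
#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
bool canPresentOnDirectFB( vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB & dfb )
{
  return physicalDevice.getDirectFBPresentationSupportEXT( queueFamilyIndex, dfb ) == VK_TRUE;
}
#endif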
VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); uint32_t propertyCount; @@ -68179,13 +88164,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) const { std::vector properties; uint32_t propertyCount; @@ -68206,8 +88191,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); uint32_t propertyCount; @@ -68231,13 +88216,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayProperties2KHR(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayProperties2KHR(Dispatch const &d ) const { std::vector properties; uint32_t propertyCount; @@ -68258,8 +88243,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type 
PhysicalDevice::getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); uint32_t propertyCount; @@ -68283,13 +88268,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const { std::vector properties; uint32_t propertyCount; @@ -68310,8 +88295,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); uint32_t propertyCount; @@ -68395,13 +88380,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), static_cast( externalHandleType ), reinterpret_cast( pExternalImageFormatProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type 
PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; Result result = static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), static_cast( externalHandleType ), reinterpret_cast( &externalImageFormatProperties ) ) ); @@ -68562,28 +88547,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX* pFeatures, VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast( pFeatures ), reinterpret_cast( pLimits ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX limits; - d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast( &features ), reinterpret_cast( &limits ) ); - return limits; - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), reinterpret_cast( pImageFormatProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, 
VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), reinterpret_cast( &imageFormatProperties ) ) ); @@ -68592,20 +88562,20 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast( pImageFormatInfo ), reinterpret_cast( pImageFormatProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" ); } template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const { StructureChain structureChain; VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get(); @@ -68615,20 +88585,20 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const 
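// Sketch of the chained image-format query above: one call fills ImageFormatProperties2
// plus an extension structure (SamplerYcbcrConversionImageFormatProperties here, assuming
// this header revision provides it). Format/usage values are arbitrary examples; the call
// throws if the combination is unsupported. Exceptions not disabled, placeholder names.
#include <vulkan/vulkan.hpp>

uint32_t maxMipLevelsForSampledRgba8( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceImageFormatInfo2 info( vk::Format::eR8G8B8A8Unorm,
                                           vk::ImageType::e2D,
                                           vk::ImageTiling::eOptimal,
                                           vk::ImageUsageFlagBits::eSampled );
  auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
                                                        vk::SamplerYcbcrConversionImageFormatProperties>( info );
  return chain.get<vk::ImageFormatProperties2>().imageFormatProperties.maxMipLevels;
}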
VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( pImageFormatInfo ), reinterpret_cast( pImageFormatProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" ); } template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const { StructureChain structureChain; VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get(); @@ -68714,13 +88684,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const { std::vector rects; uint32_t rectCount; @@ -68741,8 +88711,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector rects( 
vectorAllocator ); uint32_t rectCount; @@ -68847,7 +88817,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties(Dispatch const &d ) const { std::vector queueFamilyProperties; @@ -68857,7 +88827,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); return queueFamilyProperties; } - template + template::value, int>::type> VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector queueFamilyProperties( vectorAllocator ); @@ -68875,7 +88845,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const { std::vector queueFamilyProperties; @@ -68885,7 +88855,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); return queueFamilyProperties; } - template + template::value, int>::type> VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector queueFamilyProperties( vectorAllocator ); @@ -68895,7 +88865,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); return queueFamilyProperties; } - template + template VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const { std::vector queueFamilyProperties; @@ -68914,7 +88884,7 @@ namespace VULKAN_HPP_NAMESPACE } return queueFamilyProperties; } - template + template::value, int>::type> VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector queueFamilyProperties( vectorAllocator ); @@ -68941,7 +88911,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const { std::vector queueFamilyProperties; @@ -68951,7 +88921,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); return queueFamilyProperties; } - template + template::value, int>::type> VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector queueFamilyProperties( vectorAllocator ); @@ -68961,7 +88931,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); return queueFamilyProperties; } - template + 
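// Standalone illustration (not the library's code) of the enable_if constraint added to
// the allocator-taking overloads throughout this hunk: the overload only participates in
// overload resolution when the allocator's value_type matches the returned element type.
#include <memory>
#include <type_traits>
#include <vector>

template <typename T,
          typename Allocator = std::allocator<T>,
          typename B         = Allocator,
          typename std::enable_if<std::is_same<typename B::value_type, T>::value, int>::type = 0>
std::vector<T, Allocator> makeVector( Allocator const & vectorAllocator )
{
  return std::vector<T, Allocator>( vectorAllocator );
}
// makeVector<int>( std::allocator<int>() ) compiles; passing std::allocator<float> is
// rejected at overload resolution instead of failing deep inside std::vector.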
template VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const { std::vector queueFamilyProperties; @@ -68980,7 +88950,7 @@ namespace VULKAN_HPP_NAMESPACE } return queueFamilyProperties; } - template + template::value, int>::type> VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector queueFamilyProperties( vectorAllocator ); @@ -69007,7 +88977,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), pPropertyCount, reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d ) const { std::vector properties; @@ -69017,7 +88987,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), &propertyCount, reinterpret_cast( properties.data() ) ); return properties; } - template + template::value, int>::type> VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); @@ -69035,7 +89005,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( pFormatInfo ), pPropertyCount, reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const { std::vector properties; @@ -69045,7 +89015,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); return properties; } - template + template::value, int>::type> VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); @@ -69063,7 +89033,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( pFormatInfo ), pPropertyCount, reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const { std::vector properties; @@ -69073,7 +89043,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, 
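// Sketch of the StructureChain flavour of getQueueFamilyProperties2 shown above: each
// element carries QueueFamilyProperties2 plus an extension structure (here
// QueueFamilyCheckpointPropertiesNV, assuming VK_NV_device_diagnostic_checkpoints is
// supported by the device). Placeholder names, exceptions not disabled.
#include <iostream>
#include <vulkan/vulkan.hpp>

void printQueueFamilyCheckpointInfo( vk::PhysicalDevice physicalDevice )
{
  auto chains = physicalDevice.getQueueFamilyProperties2<
    vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyCheckpointPropertiesNV>>();
  for ( auto const & chain : chains )
  {
    vk::QueueFamilyProperties const & qfp = chain.get<vk::QueueFamilyProperties2>().queueFamilyProperties;
    vk::PipelineStageFlags checkpointStages =
      chain.get<vk::QueueFamilyCheckpointPropertiesNV>().checkpointExecutionStageMask;
    std::cout << qfp.queueCount << " queue(s), checkpoint stages: " << vk::to_string( checkpointStages ) << "\n";
  }
}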
reinterpret_cast( properties.data() ) ); return properties; } - template + template::value, int>::type> VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector properties( vectorAllocator ); @@ -69086,13 +89056,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, pCombinationCount, reinterpret_cast( pCombinations ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d ) const { std::vector combinations; uint32_t combinationCount; @@ -69113,8 +89083,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector combinations( vectorAllocator ); uint32_t combinationCount; @@ -69138,13 +89108,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; Result result = static_cast( 
d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); @@ -69153,20 +89123,20 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pSurfaceCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &surfaceCapabilities ) ) ); return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" ); } template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const { StructureChain structureChain; VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR& surfaceCapabilities = structureChain.template get(); @@ -69176,13 +89146,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; Result result = static_cast( 
d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); @@ -69191,13 +89161,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const { std::vector surfaceFormats; uint32_t surfaceFormatCount; @@ -69218,8 +89188,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector surfaceFormats( vectorAllocator ); uint32_t surfaceFormatCount; @@ -69243,13 +89213,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const { std::vector surfaceFormats; uint32_t surfaceFormatCount; @@ -69270,8 +89240,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, surfaceFormats, 
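// Sketch of the chained surface-capability query above. Assumes the instance enables
// VK_KHR_get_surface_capabilities2 and VK_KHR_surface_protected_capabilities and that
// this header revision provides SurfaceProtectedCapabilitiesKHR; `surface` is a
// placeholder vk::SurfaceKHR, exceptions not disabled.
#include <vulkan/vulkan.hpp>

bool surfaceSupportsProtected( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
{
  vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
  auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
                                                         vk::SurfaceProtectedCapabilitiesKHR>( surfaceInfo );
  return chain.get<vk::SurfaceProtectedCapabilitiesKHR>().supportsProtected == VK_TRUE;
}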
VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector surfaceFormats( vectorAllocator ); uint32_t surfaceFormatCount; @@ -69296,13 +89266,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const { std::vector presentModes; uint32_t presentModeCount; @@ -69323,8 +89293,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector presentModes( vectorAllocator ); uint32_t presentModeCount; @@ -69349,13 +89319,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type 
PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const { std::vector presentModes; uint32_t presentModeCount; @@ -69376,8 +89346,8 @@ namespace VULKAN_HPP_NAMESPACE } return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector presentModes( vectorAllocator ); uint32_t presentModeCount; @@ -69401,13 +89371,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( pSupported ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const { VULKAN_HPP_NAMESPACE::Bool32 supported; Result result = static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( &supported ) ) ); @@ -69416,13 +89386,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast( pToolProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getToolPropertiesEXT(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getToolPropertiesEXT(Dispatch const &d ) const { std::vector toolProperties; uint32_t toolCount; @@ -69443,8 +89413,8 @@ namespace 
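// Typical swapchain-support check built from the (now nodiscard) surface queries in this
// region. Placeholder names, exceptions not disabled, VK_KHR_surface assumed enabled.
#include <vulkan/vulkan.hpp>

bool surfaceIsUsable( vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex, vk::SurfaceKHR surface )
{
  if ( !physicalDevice.getSurfaceSupportKHR( queueFamilyIndex, surface ) )
  {
    return false;
  }
  std::vector<vk::SurfaceFormatKHR> formats      = physicalDevice.getSurfaceFormatsKHR( surface );
  std::vector<vk::PresentModeKHR>   presentModes = physicalDevice.getSurfacePresentModesKHR( surface );
  return !formats.empty() && !presentModes.empty();
}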
VULKAN_HPP_NAMESPACE } return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" ); } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const + template::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector toolProperties( vectorAllocator ); uint32_t toolCount; @@ -69566,7 +89536,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV(Dispatch const &d ) const { std::vector checkpointData; @@ -69576,7 +89546,7 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); return checkpointData; } - template + template::value, int>::type> VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const { std::vector checkpointData( vectorAllocator ); @@ -69602,13 +89572,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast( pBindInfo ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Queue::bindSparse( ArrayProxy bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::bindSparse( ArrayProxy const &bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const { Result result = static_cast( d.vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast( bindInfo.data() ), static_cast( fence ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::bindSparse" ); @@ -69643,13 +89613,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( pPresentInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const { Result result = static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( &presentInfo ) ) ); return createResultValue( result, 
VULKAN_HPP_NAMESPACE_STRING"::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); @@ -69658,13 +89628,13 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); } #else template - VULKAN_HPP_INLINE typename ResultValueType::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const { Result result = static_cast( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::setPerformanceConfigurationINTEL" ); @@ -69672,13 +89642,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Queue::submit( ArrayProxy submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::submit( ArrayProxy const &submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const { Result result = static_cast( d.vkQueueSubmit( m_queue, submits.size() , reinterpret_cast( submits.data() ), static_cast( fence ) ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::submit" ); @@ -69687,364 +89657,437 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkQueueWaitIdle( m_queue ) ); } #else template - VULKAN_HPP_INLINE typename ResultValueType::type Queue::waitIdle(Dispatch const &d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::waitIdle(Dispatch const &d ) const { Result result = static_cast( d.vkQueueWaitIdle( m_queue ) ); return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - #ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; 
}; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; 
+#ifdef VK_ENABLE_BETA_EXTENSIONS + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct 
isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ 
enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct 
isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - 
template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value 
= true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct 
StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum 
{ value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; +#ifdef VK_ENABLE_BETA_EXTENSIONS + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template <> struct StructExtends{ enum { value = true }; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; +#ifdef VK_ENABLE_BETA_EXTENSIONS + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + template <> struct StructExtends{ enum { value = true }; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct 
StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum 
{ value = true }; }; +#ifdef VK_ENABLE_BETA_EXTENSIONS + template <> struct StructExtends{ enum { value = true }; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #ifdef VK_USE_PLATFORM_GGP - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_GGP*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + 
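The long block of trait specializations being rewritten here renames isStructureChainValid to StructExtends, the trait that StructureChain consults when validating a pNext chain at compile time. The template arguments were lost in this copy of the patch, so the pairing below (PhysicalDeviceFeatures2 extending DeviceCreateInfo) is a standard example rather than one read off the hunk:

    #include <vulkan/vulkan.hpp>

    // StructureChain only compiles when every extension structure is marked as extending
    // its base via a StructExtends<Extension, Base> specialization like the ones above.
    vk::StructureChain<vk::DeviceCreateInfo, vk::PhysicalDeviceFeatures2> makeDeviceCreateChain()
    {
      static_assert( vk::StructExtends<vk::PhysicalDeviceFeatures2, vk::DeviceCreateInfo>::value,
                     "PhysicalDeviceFeatures2 must be chainable to DeviceCreateInfo" );
      vk::StructureChain<vk::DeviceCreateInfo, vk::PhysicalDeviceFeatures2> chain;
      chain.get<vk::PhysicalDeviceFeatures2>().features.samplerAnisotropy = VK_TRUE;
      return chain;
    }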
template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL class DynamicLoader { public: -#ifdef VULKAN_HPP_NO_EXCEPTIONS - DynamicLoader() VULKAN_HPP_NOEXCEPT : m_success( false ) -#else - DynamicLoader() : m_success( false ) -#endif 
+# ifdef VULKAN_HPP_NO_EXCEPTIONS + DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT : m_success( false ) +# else + DynamicLoader( std::string const & vulkanLibraryName = {} ) : m_success( false ) +# endif { -#if defined(__linux__) - m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); -#elif defined(__APPLE__) - m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); -#elif defined(_WIN32) - m_library = LoadLibrary( TEXT( "vulkan-1.dll" ) ); -#else - assert( false && "unsupported platform" ); -#endif + if ( !vulkanLibraryName.empty() ) + { +# if defined( __linux__ ) || defined( __APPLE__ ) + m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL ); +# elif defined( _WIN32 ) + m_library = ::LoadLibraryA( vulkanLibraryName.c_str() ); +# else +# error unsupported platform +# endif + } + else + { +# if defined( __linux__ ) + m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); + if ( m_library == nullptr ) + { + m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); + } +# elif defined( __APPLE__ ) + m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); +# elif defined( _WIN32 ) + m_library = ::LoadLibraryA( "vulkan-1.dll" ); +# else +# error unsupported platform +# endif + } - m_success = m_library != 0; + m_success = (m_library != nullptr); #ifndef VULKAN_HPP_NO_EXCEPTIONS if ( !m_success ) { @@ -70054,93 +90097,160 @@ namespace VULKAN_HPP_NAMESPACE #endif } + DynamicLoader( DynamicLoader const& ) = delete; + + DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT + : m_success(other.m_success) + , m_library(other.m_library) + { + other.m_library = nullptr; + } + + DynamicLoader &operator=( DynamicLoader const& ) = delete; + + DynamicLoader &operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT + { + m_success = other.m_success; + std::swap(m_library, other.m_library); + return *this; + } + ~DynamicLoader() VULKAN_HPP_NOEXCEPT { if ( m_library ) { -#if defined(__linux__) || defined(__APPLE__) +# if defined( __linux__ ) || defined( __APPLE__ ) dlclose( m_library ); -#elif defined(_WIN32) - FreeLibrary( m_library ); -#endif +# elif defined( _WIN32 ) + ::FreeLibrary( m_library ); +# else +# error unsupported platform +# endif } } template T getProcAddress( const char* function ) const VULKAN_HPP_NOEXCEPT { -#if defined(__linux__) || defined(__APPLE__) +# if defined( __linux__ ) || defined( __APPLE__ ) return (T)dlsym( m_library, function ); -#elif defined(_WIN32) - return (T)GetProcAddress( m_library, function ); -#endif +# elif defined( _WIN32 ) + return (T)::GetProcAddress( m_library, function ); +# else +# error unsupported platform +# endif } bool success() const VULKAN_HPP_NOEXCEPT { return m_success; } private: bool m_success; -#if defined(__linux__) || defined(__APPLE__) - void *m_library; -#elif defined(_WIN32) - HMODULE m_library; -#else -#error unsupported platform -#endif +# if defined( __linux__ ) || defined( __APPLE__ ) + void * m_library; +# elif defined( _WIN32 ) + ::HINSTANCE m_library; +# else +# error unsupported platform +# endif }; #endif + class DispatchLoaderDynamic { public: - PFN_vkCreateInstance vkCreateInstance = 0; - PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; - PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; - PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; +#endif 
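The DynamicLoader changes above add an optional library-name argument, a libvulkan.so.1 fallback on Linux, and move-only semantics (copying stays deleted). A usage sketch under those assumptions; default construction keeps the platform defaults, and the class may throw on load failure unless VULKAN_HPP_NO_EXCEPTIONS is set:

    #include <vulkan/vulkan.hpp>

    vk::DynamicLoader loadVulkanRuntime()
    {
      // Passing a name would override the built-in search (libvulkan.so / libvulkan.so.1,
      // libvulkan.dylib, vulkan-1.dll); default construction keeps the old behaviour.
      vk::DynamicLoader loader;
      auto getInstanceProcAddr =
        loader.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
      (void)getInstanceProcAddr;
      return loader;  // now legal: the loader gained a move constructor
    }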
/*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkBindAccelerationStructureMemoryKHR vkBindAccelerationStructureMemoryKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkBuildAccelerationStructureKHR vkBuildAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; - PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdBuildAccelerationStructureIndirectKHR vkCmdBuildAccelerationStructureIndirectKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdBuildAccelerationStructureKHR vkCmdBuildAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; PFN_vkCmdCopyBufferToImage 
vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; PFN_vkCmdDispatch vkCmdDispatch = 0; - PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; PFN_vkCmdDraw vkCmdDraw = 0; PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; - PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; - PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; @@ -70149,185 +90259,295 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdEndQuery vkCmdEndQuery = 0; PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; - PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; - PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; - PFN_vkCmdProcessCommandsNVX vkCmdProcessCommandsNVX = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; PFN_vkCmdPushConstants vkCmdPushConstants = 0; PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - PFN_vkCmdReserveSpaceForCommandsNVX vkCmdReserveSpaceForCommandsNVX = 0; PFN_vkCmdResetEvent vkCmdResetEvent = 0; PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; PFN_vkCmdSetCoarseSampleOrderNV 
vkCmdSetCoarseSampleOrderNV = 0; + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; - PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; PFN_vkCmdSetEvent vkCmdSetEvent = 0; PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; PFN_vkCmdSetViewport vkCmdSetViewport = 0; PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; - PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; - PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; - PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; - PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; - PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; - PFN_vkAllocateMemory vkAllocateMemory = 0; - PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; - PFN_vkBindBufferMemory vkBindBufferMemory = 0; - 
PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; - PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; - PFN_vkBindImageMemory vkBindImageMemory = 0; - PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; - PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; +#ifdef VK_USE_PLATFORM_ANDROID_KHR + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ PFN_vkCreateBuffer vkCreateBuffer = 0; PFN_vkCreateBufferView vkCreateBufferView = 0; PFN_vkCreateCommandPool vkCreateCommandPool = 0; PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; - PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkCreateDevice vkCreateDevice = 0; +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; PFN_vkCreateEvent vkCreateEvent = 0; PFN_vkCreateFence vkCreateFence = 0; PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; +#ifdef VK_USE_PLATFORM_IOS_MVK + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ PFN_vkCreateImage vkCreateImage = 0; +#ifdef VK_USE_PLATFORM_FUCHSIA + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ PFN_vkCreateImageView vkCreateImageView = 0; - PFN_vkCreateIndirectCommandsLayoutNVX vkCreateIndirectCommandsLayoutNVX = 0; - PFN_vkCreateObjectTableNVX vkCreateObjectTableNVX = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkCreateInstance vkCreateInstance = 0; +#ifdef VK_USE_PLATFORM_MACOS_MVK + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ +#ifdef VK_USE_PLATFORM_METAL_EXT + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkCreatePrivateDataSlotEXT 
vkCreatePrivateDataSlotEXT = 0; PFN_vkCreateQueryPool vkCreateQueryPool = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; PFN_vkCreateRenderPass vkCreateRenderPass = 0; - PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; PFN_vkCreateSampler vkCreateSampler = 0; - PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; PFN_vkCreateSemaphore vkCreateSemaphore = 0; PFN_vkCreateShaderModule vkCreateShaderModule = 0; PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; +#ifdef VK_USE_PLATFORM_GGP + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +#endif /*VK_USE_PLATFORM_GGP*/ PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; +#ifdef VK_USE_PLATFORM_VI_NN + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +#endif /*VK_USE_PLATFORM_VI_NN*/ +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkDestroyBuffer vkDestroyBuffer = 0; PFN_vkDestroyBufferView vkDestroyBufferView = 0; PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; - PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; PFN_vkDestroyDevice vkDestroyDevice = 0; PFN_vkDestroyEvent vkDestroyEvent = 0; PFN_vkDestroyFence vkDestroyFence = 0; PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; PFN_vkDestroyImage vkDestroyImage = 0; PFN_vkDestroyImageView vkDestroyImageView = 0; - PFN_vkDestroyIndirectCommandsLayoutNVX vkDestroyIndirectCommandsLayoutNVX = 0; - PFN_vkDestroyObjectTableNVX 
vkDestroyObjectTableNVX = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; + PFN_vkDestroyInstance vkDestroyInstance = 0; PFN_vkDestroyPipeline vkDestroyPipeline = 0; PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; PFN_vkDestroySampler vkDestroySampler = 0; - PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; PFN_vkDestroySemaphore vkDestroySemaphore = 0; PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; PFN_vkFreeMemory vkFreeMemory = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetAccelerationStructureMemoryRequirementsKHR vkGetAccelerationStructureMemoryRequirementsKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; #ifdef VK_USE_PLATFORM_ANDROID_KHR PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; - PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; - PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + 
PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; - PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; - PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; #ifdef VK_USE_PLATFORM_WIN32_KHR PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; PFN_vkGetEventStatus vkGetEventStatus = 0; PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; PFN_vkGetFenceStatus vkGetFenceStatus = 0; #ifdef VK_USE_PLATFORM_WIN32_KHR PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; - PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; - PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + 
PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; #ifdef VK_USE_PLATFORM_ANDROID_KHR PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ @@ -70345,168 +90565,46 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; - PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; - PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; - PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; - PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; - PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; - PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; - PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; - PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; - PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; - PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; - PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; - PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; - PFN_vkMapMemory vkMapMemory = 0; - PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; - PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; - PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; - PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; - PFN_vkRegisterObjectsNVX vkRegisterObjectsNVX = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; - PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; - PFN_vkResetCommandPool vkResetCommandPool = 0; - PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; - PFN_vkResetEvent vkResetEvent = 0; - PFN_vkResetFences vkResetFences = 0; - PFN_vkResetQueryPool vkResetQueryPool = 0; - PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; - PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; - PFN_vkSetEvent vkSetEvent = 0; - PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; - PFN_vkSignalSemaphore vkSignalSemaphore = 0; - PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; - PFN_vkTrimCommandPool 
vkTrimCommandPool = 0; - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; - PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; - PFN_vkUnmapMemory vkUnmapMemory = 0; - PFN_vkUnregisterObjectsNVX vkUnregisterObjectsNVX = 0; - PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; - PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; - PFN_vkWaitForFences vkWaitForFences = 0; - PFN_vkWaitSemaphores vkWaitSemaphores = 0; - PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; - PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; - PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; - PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; -#ifdef VK_USE_PLATFORM_IOS_MVK - PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; -#endif /*VK_USE_PLATFORM_IOS_MVK*/ -#ifdef VK_USE_PLATFORM_FUCHSIA - PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ -#ifdef VK_USE_PLATFORM_MACOS_MVK - PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ -#ifdef VK_USE_PLATFORM_METAL_EXT - PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ -#ifdef VK_USE_PLATFORM_GGP - PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; -#endif /*VK_USE_PLATFORM_GGP*/ -#ifdef VK_USE_PLATFORM_VI_NN - PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; -#endif /*VK_USE_PLATFORM_VI_NN*/ -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR - PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; - PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; - PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; - PFN_vkDestroyInstance vkDestroyInstance = 0; - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; - PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; - PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; - PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - PFN_vkCreateDevice vkCreateDevice = 0; - PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; - PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; - PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; - PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; - 
PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; - PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; - PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; - PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; - PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; - PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; - PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + 
PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; - PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; @@ -70531,11 +90629,49 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_XLIB_KHR PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; #endif /*VK_USE_PLATFORM_XLIB_KHR*/ + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; - PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkGetSemaphoreFdKHR 
vkGetSemaphoreFdKHR = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkMapMemory vkMapMemory = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; PFN_vkQueueBindSparse vkQueueBindSparse = 0; PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; @@ -70544,24 +90680,67 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; PFN_vkQueueSubmit vkQueueSubmit = 0; PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkUnmapMemory vkUnmapMemory = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkWriteAccelerationStructuresPropertiesKHR 
vkWriteAccelerationStructuresPropertiesKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
  public:
    DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
#if !defined(VK_NO_PROTOTYPES)
    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
-    DispatchLoaderDynamic(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device) VULKAN_HPP_NOEXCEPT
+    template <typename DynamicLoader>
+    void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device, DynamicLoader const& dl) VULKAN_HPP_NOEXCEPT
    {
-      init(instance, device);
+      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr");
+      PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>("vkGetDeviceProcAddr");
+      init(static_cast<VkInstance>(instance), getInstanceProcAddr, static_cast<VkDevice>(device), device ? getDeviceProcAddr : nullptr);
    }
    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
+    template <typename DynamicLoader = VULKAN_HPP_NAMESPACE::DynamicLoader>
    void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device) VULKAN_HPP_NOEXCEPT
    {
-      static vk::DynamicLoader dl;
-      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr");
-      PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.getProcAddress<PFN_vkGetDeviceProcAddr>("vkGetDeviceProcAddr");
-      init(static_cast<VkInstance>(instance), getInstanceProcAddr, static_cast<VkDevice>(device), device ? getDeviceProcAddr : nullptr);
+      static DynamicLoader dl;
+      init(instance, device, dl);
    }
#endif // !defined(VK_NO_PROTOTYPES)
@@ -70601,11 +90780,19 @@ namespace VULKAN_HPP_NAMESPACE
    void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
    {
      VkInstance instance = static_cast<VkInstance>(instanceCpp);
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+      vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
      vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
      vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
      vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
+      vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+      vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+      vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
      vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
      vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
#ifdef VK_USE_PLATFORM_IOS_MVK
@@ -70643,61 +90830,69 @@ namespace VULKAN_HPP_NAMESPACE
      vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
      vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
      vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR(
vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); - vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); - vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); - vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); - vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); - vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetInstanceProcAddr = PFN_vkGetInstanceProcAddr( vkGetInstanceProcAddr( instance, "vkGetInstanceProcAddr" ) ); vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); - vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); - vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); - vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); - vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + 
vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); - vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); - vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX" ) ); + vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); - vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); - vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); - vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceProperties2KHR" ) ); + vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); @@ -70726,107 +90921,7 @@ namespace VULKAN_HPP_NAMESPACE vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( 
vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); - vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); - vkCmdDispatchIndirect = 
PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, 
"vkCmdPipelineBarrier" ) ); - vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( vkGetInstanceProcAddr( instance, "vkCmdProcessCommandsNVX" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX( vkGetInstanceProcAddr( instance, "vkCmdReserveSpaceForCommandsNVX" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); - 
vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -70837,41 +90932,223 @@ namespace VULKAN_HPP_NAMESPACE vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryKHR" ) ); + if ( !vkBindAccelerationStructureMemoryKHR ) vkBindAccelerationStructureMemoryKHR = vkBindAccelerationStructureMemoryNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); + if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR; vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); + if 
( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR; +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkBuildAccelerationStructureKHR = PFN_vkBuildAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); + if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureIndirectKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdBuildAccelerationStructureKHR = PFN_vkCmdBuildAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, 
"vkCmdClearDepthStencilImage" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); + if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR; + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); + if ( 
!vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); + if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); + if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); + if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, 
"vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) ); + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) ); + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) ); + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); + if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) ); + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPrimitiveTopologyEXT = 
PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) ); + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); + vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + if ( !vkCmdWriteAccelerationStructuresPropertiesKHR ) vkCmdWriteAccelerationStructuresPropertiesKHR = vkCmdWriteAccelerationStructuresPropertiesNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCopyAccelerationStructureToMemoryKHR = 
PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); - vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNVX" ) ); - vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX( vkGetInstanceProcAddr( instance, "vkCreateObjectTableNVX" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, 
"vkCreatePrivateDataSlotEXT" ) ); vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); + if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR; vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); + if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); @@ -70879,66 +91156,101 @@ namespace VULKAN_HPP_NAMESPACE vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); + if ( !vkDestroyAccelerationStructureKHR ) vkDestroyAccelerationStructureKHR = vkDestroyAccelerationStructureNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( 
vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) ); vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); - vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNVX" ) ); - vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX( vkGetInstanceProcAddr( instance, "vkDestroyObjectTableNVX" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) ); vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); + if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( 
vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) ); vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) ); vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); #ifdef VK_USE_PLATFORM_ANDROID_KHR vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); + if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); + if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = 
vkGetBufferMemoryRequirements2KHR; vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = 
PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) ); vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); @@ -70948,14 +91260,18 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); + if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); #ifdef VK_USE_PLATFORM_ANDROID_KHR vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); @@ -70978,12 +91294,22 @@ namespace VULKAN_HPP_NAMESPACE vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" 
) ); vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) ); vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); + if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); @@ -71006,39 +91332,6 @@ namespace VULKAN_HPP_NAMESPACE vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) ); vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); - vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX( vkGetInstanceProcAddr( instance, "vkRegisterObjectsNVX" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( 
vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); - vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); - vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); - vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); - vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); - vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX( vkGetInstanceProcAddr( instance, "vkUnregisterObjectsNVX" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); - vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); - vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); - vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); @@ -71047,112 +91340,51 @@ namespace VULKAN_HPP_NAMESPACE vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); vkQueueSubmit = PFN_vkQueueSubmit( 
vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); + vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); + if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT; + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) ); + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); + if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR; + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); + if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR; + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); + if ( 
!vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); + if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR; +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ } void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT { VkDevice device = static_cast(deviceCpp); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); - vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( 
vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); - vkCmdEndRenderPass2 = 
PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); - vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( vkGetDeviceProcAddr( device, "vkCmdProcessCommandsNVX" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX( vkGetDeviceProcAddr( device, "vkCmdReserveSpaceForCommandsNVX" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( 
vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); - vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -71163,41 +91395,223 @@ namespace VULKAN_HPP_NAMESPACE vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryKHR" ) ); + if ( !vkBindAccelerationStructureMemoryKHR ) vkBindAccelerationStructureMemoryKHR = vkBindAccelerationStructureMemoryNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); vkBindBufferMemory2KHR = 
PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR; vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR; +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkBuildAccelerationStructureKHR = PFN_vkBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureIndirectKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdBuildAccelerationStructureKHR = 
PFN_vkCmdBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR; + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( 
vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + vkCmdNextSubpass2 = 
PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( 
vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + if ( !vkCmdWriteAccelerationStructuresPropertiesKHR ) vkCmdWriteAccelerationStructuresPropertiesKHR = vkCmdWriteAccelerationStructuresPropertiesNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, 
"vkCopyAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); - vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNVX" ) ); - vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX( vkGetDeviceProcAddr( device, "vkCreateObjectTableNVX" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkCreatePrivateDataSlotEXT = 
PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR; vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); @@ -71205,66 +91619,101 @@ namespace VULKAN_HPP_NAMESPACE vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + if ( !vkDestroyAccelerationStructureKHR ) vkDestroyAccelerationStructureKHR = vkDestroyAccelerationStructureNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, 
"vkDestroyDeferredOperationKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); - vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNVX" ) ); - vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX( vkGetDeviceProcAddr( device, "vkDestroyObjectTableNVX" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( 
device, "vkDestroyValidationCacheEXT" ) ); vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); #ifdef VK_USE_PLATFORM_ANDROID_KHR vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + vkGetBufferOpaqueCaptureAddress = 
PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, 
"vkGetDeviceProcAddr" ) ); vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); @@ -71274,14 +91723,18 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VK_USE_PLATFORM_WIN32_KHR vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); #ifdef VK_USE_PLATFORM_ANDROID_KHR vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); @@ -71304,12 +91757,22 @@ namespace VULKAN_HPP_NAMESPACE vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkGetQueueCheckpointDataNV = 
PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); #ifdef VK_USE_PLATFORM_WIN32_KHR vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); @@ -71332,39 +91795,6 @@ namespace VULKAN_HPP_NAMESPACE vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); - vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX( vkGetDeviceProcAddr( device, "vkRegisterObjectsNVX" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); - vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( 
device, "vkResetQueryPool" ) ); - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); - vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); - vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); - vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); - vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX( vkGetDeviceProcAddr( device, "vkUnregisterObjectsNVX" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); - vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); - vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); - vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); @@ -71373,8 +91803,366 @@ namespace VULKAN_HPP_NAMESPACE vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + 
vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT; + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR; + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR; + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR; +#ifdef VK_ENABLE_BETA_EXTENSIONS + vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, 
"vkWriteAccelerationStructuresPropertiesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ } }; } // namespace VULKAN_HPP_NAMESPACE + +namespace std +{ + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const& accelerationStructureKHR) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(accelerationStructureKHR)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::Buffer const& buffer) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(buffer)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferView const& bufferView) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(bufferView)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBuffer const& commandBuffer) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(commandBuffer)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandPool const& commandPool) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(commandPool)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const& debugReportCallbackEXT) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(debugReportCallbackEXT)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const& debugUtilsMessengerEXT) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(debugUtilsMessengerEXT)); + } + }; + +#ifdef VK_ENABLE_BETA_EXTENSIONS + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::DeferredOperationKHR const& deferredOperationKHR) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(deferredOperationKHR)); + } + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorPool const& descriptorPool) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(descriptorPool)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSet const& descriptorSet) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(descriptorSet)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetLayout const& descriptorSetLayout) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(descriptorSetLayout)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const& descriptorUpdateTemplate) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(descriptorUpdateTemplate)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::Device const& device) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(device)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceMemory const& deviceMemory) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(deviceMemory)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayKHR const& displayKHR) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(displayKHR)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayModeKHR const& displayModeKHR) const VULKAN_HPP_NOEXCEPT + { + return 
std::hash{}(static_cast(displayModeKHR)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::Event const& event) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(event)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::Fence const& fence) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(fence)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::Framebuffer const& framebuffer) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(framebuffer)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::Image const& image) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(image)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageView const& imageView) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(imageView)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const& indirectCommandsLayoutNV) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(indirectCommandsLayoutNV)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::Instance const& instance) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(instance)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const& performanceConfigurationINTEL) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(performanceConfigurationINTEL)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevice const& physicalDevice) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(physicalDevice)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::Pipeline const& pipeline) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(pipeline)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCache const& pipelineCache) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(pipelineCache)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineLayout const& pipelineLayout) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(pipelineLayout)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT const& privateDataSlotEXT) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(privateDataSlotEXT)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::QueryPool const& queryPool) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(queryPool)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::Queue const& queue) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(queue)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPass const& renderPass) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(renderPass)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::Sampler const& sampler) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(sampler)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const& 
samplerYcbcrConversion) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(samplerYcbcrConversion)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::Semaphore const& semaphore) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(semaphore)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::ShaderModule const& shaderModule) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(shaderModule)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceKHR const& surfaceKHR) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(surfaceKHR)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainKHR const& swapchainKHR) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(swapchainKHR)); + } + }; + + template <> struct hash + { + std::size_t operator()(VULKAN_HPP_NAMESPACE::ValidationCacheEXT const& validationCacheEXT) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}(static_cast(validationCacheEXT)); + } + }; +} #endif diff --git a/thirdparty/vulkan/include/vulkan/vulkan_android.h b/thirdparty/vulkan/include/vulkan/vulkan_android.h index 9b8d3e276f8..50ef85f13e4 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_android.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_android.h @@ -2,19 +2,9 @@ #define VULKAN_ANDROID_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/thirdparty/vulkan/include/vulkan/vulkan_beta.h b/thirdparty/vulkan/include/vulkan/vulkan_beta.h new file mode 100644 index 00000000000..4b7f2b29936 --- /dev/null +++ b/thirdparty/vulkan/include/vulkan/vulkan_beta.h @@ -0,0 +1,459 @@ +#ifndef VULKAN_BETA_H_ +#define VULKAN_BETA_H_ 1 + +/* +** Copyright (c) 2015-2020 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_KHR_portability_subset 1 +#define VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION 1 +#define VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME "VK_KHR_portability_subset" +typedef struct VkPhysicalDevicePortabilitySubsetFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 constantAlphaColorBlendFactors; + VkBool32 events; + VkBool32 imageViewFormatReinterpretation; + VkBool32 imageViewFormatSwizzle; + VkBool32 imageView2DOn3DImage; + VkBool32 multisampleArrayImage; + VkBool32 mutableComparisonSamplers; + VkBool32 pointPolygons; + VkBool32 samplerMipLodBias; + VkBool32 separateStencilMaskRef; + VkBool32 shaderSampleRateInterpolationFunctions; + VkBool32 tessellationIsolines; + VkBool32 tessellationPointMode; + VkBool32 triangleFans; + VkBool32 vertexAttributeAccessBeyondStride; +} VkPhysicalDevicePortabilitySubsetFeaturesKHR; + +typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t minVertexInputBindingStrideAlignment; +} VkPhysicalDevicePortabilitySubsetPropertiesKHR; + + + +#define VK_KHR_deferred_host_operations 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) +#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 3 +#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations" +typedef struct VkDeferredOperationInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeferredOperationKHR operationHandle; +} VkDeferredOperationInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation); +typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator); +typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR( + VkDevice device, + const VkAllocationCallbacks* pAllocator, + VkDeferredOperationKHR* pDeferredOperation); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR( + VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR( + VkDevice device, + VkDeferredOperationKHR operation); +#endif + + +#define VK_KHR_pipeline_library 1 +#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library" +typedef struct VkPipelineLibraryCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t libraryCount; + const VkPipeline* pLibraries; +} VkPipelineLibraryCreateInfoKHR; + + + +#define VK_KHR_ray_tracing 1 +#define VK_KHR_RAY_TRACING_SPEC_VERSION 8 +#define VK_KHR_RAY_TRACING_EXTENSION_NAME "VK_KHR_ray_tracing" + +typedef enum VkAccelerationStructureBuildTypeKHR { + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0, + 
VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureBuildTypeKHR; +typedef union VkDeviceOrHostAddressKHR { + VkDeviceAddress deviceAddress; + void* hostAddress; +} VkDeviceOrHostAddressKHR; + +typedef union VkDeviceOrHostAddressConstKHR { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstKHR; + +typedef struct VkAccelerationStructureBuildOffsetInfoKHR { + uint32_t primitiveCount; + uint32_t primitiveOffset; + uint32_t firstVertex; + uint32_t transformOffset; +} VkAccelerationStructureBuildOffsetInfoKHR; + +typedef struct VkRayTracingShaderGroupCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; + const void* pShaderGroupCaptureReplayHandle; +} VkRayTracingShaderGroupCreateInfoKHR; + +typedef struct VkRayTracingPipelineInterfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxPayloadSize; + uint32_t maxAttributeSize; + uint32_t maxCallableSize; +} VkRayTracingPipelineInterfaceCreateInfoKHR; + +typedef struct VkRayTracingPipelineCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoKHR* pGroups; + uint32_t maxRecursionDepth; + VkPipelineLibraryCreateInfoKHR libraries; + const VkRayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoKHR; + +typedef struct VkAccelerationStructureGeometryTrianglesDataKHR { + VkStructureType sType; + const void* pNext; + VkFormat vertexFormat; + VkDeviceOrHostAddressConstKHR vertexData; + VkDeviceSize vertexStride; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexData; + VkDeviceOrHostAddressConstKHR transformData; +} VkAccelerationStructureGeometryTrianglesDataKHR; + +typedef struct VkAccelerationStructureGeometryAabbsDataKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR data; + VkDeviceSize stride; +} VkAccelerationStructureGeometryAabbsDataKHR; + +typedef struct VkAccelerationStructureGeometryInstancesDataKHR { + VkStructureType sType; + const void* pNext; + VkBool32 arrayOfPointers; + VkDeviceOrHostAddressConstKHR data; +} VkAccelerationStructureGeometryInstancesDataKHR; + +typedef union VkAccelerationStructureGeometryDataKHR { + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryInstancesDataKHR instances; +} VkAccelerationStructureGeometryDataKHR; + +typedef struct VkAccelerationStructureGeometryKHR { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkAccelerationStructureGeometryDataKHR geometry; + VkGeometryFlagsKHR flags; +} VkAccelerationStructureGeometryKHR; + +typedef struct VkAccelerationStructureBuildGeometryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeKHR type; + VkBuildAccelerationStructureFlagsKHR flags; + VkBool32 update; + VkAccelerationStructureKHR srcAccelerationStructure; + VkAccelerationStructureKHR dstAccelerationStructure; + VkBool32 
geometryArrayOfPointers; + uint32_t geometryCount; + const VkAccelerationStructureGeometryKHR* const* ppGeometries; + VkDeviceOrHostAddressKHR scratchData; +} VkAccelerationStructureBuildGeometryInfoKHR; + +typedef struct VkAccelerationStructureCreateGeometryTypeInfoKHR { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + uint32_t maxPrimitiveCount; + VkIndexType indexType; + uint32_t maxVertexCount; + VkFormat vertexFormat; + VkBool32 allowsTransforms; +} VkAccelerationStructureCreateGeometryTypeInfoKHR; + +typedef struct VkAccelerationStructureCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceSize compactedSize; + VkAccelerationStructureTypeKHR type; + VkBuildAccelerationStructureFlagsKHR flags; + uint32_t maxGeometryCount; + const VkAccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos; + VkDeviceAddress deviceAddress; +} VkAccelerationStructureCreateInfoKHR; + +typedef struct VkAccelerationStructureMemoryRequirementsInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureMemoryRequirementsTypeKHR type; + VkAccelerationStructureBuildTypeKHR buildType; + VkAccelerationStructureKHR accelerationStructure; +} VkAccelerationStructureMemoryRequirementsInfoKHR; + +typedef struct VkPhysicalDeviceRayTracingFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracing; + VkBool32 rayTracingShaderGroupHandleCaptureReplay; + VkBool32 rayTracingShaderGroupHandleCaptureReplayMixed; + VkBool32 rayTracingAccelerationStructureCaptureReplay; + VkBool32 rayTracingIndirectTraceRays; + VkBool32 rayTracingIndirectAccelerationStructureBuild; + VkBool32 rayTracingHostAccelerationStructureCommands; + VkBool32 rayQuery; + VkBool32 rayTracingPrimitiveCulling; +} VkPhysicalDeviceRayTracingFeaturesKHR; + +typedef struct VkPhysicalDeviceRayTracingPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxPrimitiveCount; + uint32_t maxDescriptorSetAccelerationStructures; + uint32_t shaderGroupHandleCaptureReplaySize; +} VkPhysicalDeviceRayTracingPropertiesKHR; + +typedef struct VkAccelerationStructureDeviceAddressInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR accelerationStructure; +} VkAccelerationStructureDeviceAddressInfoKHR; + +typedef struct VkAccelerationStructureVersionKHR { + VkStructureType sType; + const void* pNext; + const uint8_t* versionData; +} VkAccelerationStructureVersionKHR; + +typedef struct VkStridedBufferRegionKHR { + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize stride; + VkDeviceSize size; +} VkStridedBufferRegionKHR; + +typedef struct VkTraceRaysIndirectCommandKHR { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkTraceRaysIndirectCommandKHR; + +typedef struct VkCopyAccelerationStructureToMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkDeviceOrHostAddressKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureToMemoryInfoKHR; + +typedef struct VkCopyMemoryToAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyMemoryToAccelerationStructureInfoKHR; + +typedef struct VkCopyAccelerationStructureInfoKHR { + 
VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureKHR)(VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsKHR)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureIndirectKHR)(VkCommandBuffer commandBuffer, const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, VkBuffer indirectBuffer, VkDeviceSize indirectOffset, uint32_t indirectStride); +typedef VkResult (VKAPI_PTR *PFN_vkBuildAccelerationStructureKHR)(VkDevice device, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureKHR)(VkDevice device, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureToMemoryKHR)(VkDevice device, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToAccelerationStructureKHR)(VkDevice device, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkWriteAccelerationStructuresPropertiesKHR)(VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureToMemoryKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysKHR)(VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesKHR)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetAccelerationStructureDeviceAddressKHR)(VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef void (VKAPI_PTR 
*PFN_vkCmdTraceRaysIndirectKHR)(VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, VkBuffer buffer, VkDeviceSize offset); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)(VkDevice device, const VkAccelerationStructureVersionKHR* version); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureKHR( + VkDevice device, + const VkAccelerationStructureCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureKHR* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsKHR( + VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureIndirectKHR( + VkCommandBuffer commandBuffer, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, + VkBuffer indirectBuffer, + VkDeviceSize indirectOffset, + uint32_t indirectStride); + +VKAPI_ATTR VkResult VKAPI_CALL vkBuildAccelerationStructureKHR( + VkDevice device, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureKHR( + VkDevice device, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureToMemoryKHR( + VkDevice device, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToAccelerationStructureKHR( + VkDevice device, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkWriteAccelerationStructuresPropertiesKHR( + VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureToMemoryKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysKHR( + VkCommandBuffer commandBuffer, + const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, + const VkStridedBufferRegionKHR* pMissShaderBindingTable, + const VkStridedBufferRegionKHR* pHitShaderBindingTable, + const VkStridedBufferRegionKHR* pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesKHR( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + 
VkPipeline* pPipelines); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetAccelerationStructureDeviceAddressKHR( + VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirectKHR( + VkCommandBuffer commandBuffer, + const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, + const VkStridedBufferRegionKHR* pMissShaderBindingTable, + const VkStridedBufferRegionKHR* pHitShaderBindingTable, + const VkStridedBufferRegionKHR* pCallableShaderBindingTable, + VkBuffer buffer, + VkDeviceSize offset); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceAccelerationStructureCompatibilityKHR( + VkDevice device, + const VkAccelerationStructureVersionKHR* version); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/thirdparty/vulkan/include/vulkan/vulkan_core.h b/thirdparty/vulkan/include/vulkan/vulkan_core.h index ea96fc43ed3..ac904bca213 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_core.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_core.h @@ -2,19 +2,9 @@ #define VULKAN_CORE_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* @@ -31,24 +21,6 @@ extern "C" { #define VK_VERSION_1_0 1 #include "vk_platform.h" -#define VK_MAKE_VERSION(major, minor, patch) \ - (((major) << 22) | ((minor) << 12) | (patch)) - -// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. -//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0 - -// Vulkan 1.0 version number -#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0 - -#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) -#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff) -#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff) -// Version of this file -#define VK_HEADER_VERSION 131 - - -#define VK_NULL_HANDLE 0 - #define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; @@ -61,10 +33,34 @@ extern "C" { #endif #endif -typedef uint32_t VkFlags; +#define VK_MAKE_VERSION(major, minor, patch) \ + ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) + +// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. 
+//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0 + +// Vulkan 1.0 version number +#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0 + +// Version of this file +#define VK_HEADER_VERSION 154 + +// Complete version of this file +#define VK_HEADER_VERSION_COMPLETE VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION) + +#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) +#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff) +#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff) + +#define VK_NULL_HANDLE 0 + typedef uint32_t VkBool32; +typedef uint64_t VkDeviceAddress; typedef uint64_t VkDeviceSize; +typedef uint32_t VkFlags; typedef uint32_t VkSampleMask; +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) VK_DEFINE_HANDLE(VkInstance) VK_DEFINE_HANDLE(VkPhysicalDevice) VK_DEFINE_HANDLE(VkDevice) @@ -73,8 +69,6 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) VK_DEFINE_HANDLE(VkCommandBuffer) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) @@ -82,38 +76,30 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) -#define VK_LOD_CLAMP_NONE 1000.0f -#define VK_REMAINING_MIP_LEVELS (~0U) -#define VK_REMAINING_ARRAY_LAYERS (~0U) -#define VK_WHOLE_SIZE (~0ULL) #define VK_ATTACHMENT_UNUSED (~0U) -#define VK_TRUE 1 #define VK_FALSE 0 +#define VK_LOD_CLAMP_NONE 1000.0f #define VK_QUEUE_FAMILY_IGNORED (~0U) +#define VK_REMAINING_ARRAY_LAYERS (~0U) +#define VK_REMAINING_MIP_LEVELS (~0U) #define VK_SUBPASS_EXTERNAL (~0U) -#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256 -#define VK_UUID_SIZE 16 +#define VK_TRUE 1 +#define VK_WHOLE_SIZE (~0ULL) #define VK_MAX_MEMORY_TYPES 32 #define VK_MAX_MEMORY_HEAPS 16 +#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256 +#define VK_UUID_SIZE 16 #define VK_MAX_EXTENSION_NAME_SIZE 256 #define VK_MAX_DESCRIPTION_SIZE 256 -typedef enum VkPipelineCacheHeaderVersion { - VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1, - VK_PIPELINE_CACHE_HEADER_VERSION_BEGIN_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, - VK_PIPELINE_CACHE_HEADER_VERSION_END_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, - VK_PIPELINE_CACHE_HEADER_VERSION_RANGE_SIZE = (VK_PIPELINE_CACHE_HEADER_VERSION_ONE - VK_PIPELINE_CACHE_HEADER_VERSION_ONE + 1), - VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF -} VkPipelineCacheHeaderVersion; - typedef enum VkResult { VK_SUCCESS = 0, VK_NOT_READY = 1, @@ -145,17 +131,21 @@ typedef enum VkResult { VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, VK_ERROR_INVALID_SHADER_NV = 
-1000012000, + VK_ERROR_INCOMPATIBLE_VERSION_KHR = -1000150000, VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000, VK_ERROR_NOT_PERMITTED_EXT = -1000174001, VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000, + VK_THREAD_IDLE_KHR = 1000268000, + VK_THREAD_DONE_KHR = 1000268001, + VK_OPERATION_DEFERRED_KHR = 1000268002, + VK_OPERATION_NOT_DEFERRED_KHR = 1000268003, + VK_PIPELINE_COMPILE_REQUIRED_EXT = 1000297000, VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY, VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION, VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, - VK_RESULT_BEGIN_RANGE = VK_ERROR_UNKNOWN, - VK_RESULT_END_RANGE = VK_INCOMPLETE, - VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_UNKNOWN + 1), + VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED_EXT, VK_RESULT_MAX_ENUM = 0x7FFFFFFF } VkResult; @@ -352,6 +342,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT = 1000028001, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT = 1000028002, VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX = 1000030001, VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000, @@ -384,12 +375,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001, VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002, VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000, - VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000, - VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001, - VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002, - VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003, - VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004, - VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005, VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000, VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000, VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000, @@ -453,6 +438,26 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, + VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR = 1000165006, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR = 1000165007, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR = 1000150000, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR = 1000150001, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR = 1000150002, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR = 1000150003, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR = 1000150004, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR = 1000150005, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR = 1000150006, + 
VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR = 1000150008, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR = 1000150009, + VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR = 1000150010, + VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR = 1000150011, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR = 1000150012, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR = 1000150013, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR = 1000150014, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR = 1000150015, + VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR = 1000150016, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR = 1000150017, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR = 1000150018, VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV = 1000154000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV = 1000154001, @@ -464,6 +469,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005, VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000, VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR = 1000163000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR = 1000163001, VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002, @@ -473,8 +480,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_GEOMETRY_NV = 1000165003, VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV = 1000165004, VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV = 1000165005, - VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = 1000165006, - VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = 1000165007, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV = 1000165008, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV = 1000165009, VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV = 1000165011, @@ -507,7 +512,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000, VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000, - VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = 1000210000, + VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000, VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001, VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL = 1000210002, VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL = 1000210003, @@ -550,7 +555,10 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = 1000259000, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = 1000259001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = 1000259002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT = 1000260000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = 1000265000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT = 1000267000, + VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR = 
1000268000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000, VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR = 1000269002, @@ -558,8 +566,46 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = 1000276000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV = 1000277000, + VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV = 1000277001, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV = 1000277002, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV = 1000277003, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV = 1000277004, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV = 1000277005, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV = 1000277006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV = 1000277007, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = 1000281001, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM = 1000282000, + VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM = 1000282001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT = 1000286000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT = 1000286001, + VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT = 1000287000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT = 1000287001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT = 1000287002, + VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT = 1000295000, + VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT = 1000295001, + VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT = 1000295002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = 1000297000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000, + VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT = 1000332000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT = 1000332001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT = 1000335000, + VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR = 1000337000, + VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR = 1000337001, + VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR = 1000337002, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR = 1000337003, + VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR = 1000337004, + VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR = 1000337005, + VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR = 1000337006, + VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR = 1000337007, + VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = 1000337008, + VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = 1000337009, + VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = 1000337010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000, + VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, @@ -618,8 +664,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, @@ -643,6 +689,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, + VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, @@ -658,6 +706,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, @@ -673,29 +722,110 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, - VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO, - VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, - VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1), VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF } VkStructureType; +typedef enum VkImageLayout { + VK_IMAGE_LAYOUT_UNDEFINED = 0, + VK_IMAGE_LAYOUT_GENERAL = 1, + VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, + VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, + VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, + VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, + VK_IMAGE_LAYOUT_PREINITIALIZED = 8, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL = 1000241000, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL = 1000241001, + VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002, + VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003, + VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, + VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, + VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = 1000164003, + VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF +} VkImageLayout; + +typedef enum VkObjectType { + VK_OBJECT_TYPE_UNKNOWN = 0, + VK_OBJECT_TYPE_INSTANCE = 1, + VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, + VK_OBJECT_TYPE_DEVICE = 3, + VK_OBJECT_TYPE_QUEUE = 4, + VK_OBJECT_TYPE_SEMAPHORE = 5, + VK_OBJECT_TYPE_COMMAND_BUFFER = 6, + VK_OBJECT_TYPE_FENCE = 7, + VK_OBJECT_TYPE_DEVICE_MEMORY = 8, + VK_OBJECT_TYPE_BUFFER = 9, + VK_OBJECT_TYPE_IMAGE = 10, + VK_OBJECT_TYPE_EVENT = 11, + VK_OBJECT_TYPE_QUERY_POOL = 12, + VK_OBJECT_TYPE_BUFFER_VIEW = 13, + VK_OBJECT_TYPE_IMAGE_VIEW = 14, + VK_OBJECT_TYPE_SHADER_MODULE = 15, + VK_OBJECT_TYPE_PIPELINE_CACHE = 16, + VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, + VK_OBJECT_TYPE_RENDER_PASS = 18, + VK_OBJECT_TYPE_PIPELINE = 19, + VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, + VK_OBJECT_TYPE_SAMPLER = 21, + VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, + VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, + VK_OBJECT_TYPE_FRAMEBUFFER = 24, + VK_OBJECT_TYPE_COMMAND_POOL = 25, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000, + 
VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000, + VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, + VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, + VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, + VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, + VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, + VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR = 1000165000, + VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, + VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000, + VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000, + VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = 1000295000, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, + VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkObjectType; + +typedef enum VkVendorId { + VK_VENDOR_ID_VIV = 0x10001, + VK_VENDOR_ID_VSI = 0x10002, + VK_VENDOR_ID_KAZAN = 0x10003, + VK_VENDOR_ID_CODEPLAY = 0x10004, + VK_VENDOR_ID_MESA = 0x10005, + VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF +} VkVendorId; + +typedef enum VkPipelineCacheHeaderVersion { + VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1, + VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheHeaderVersion; + typedef enum VkSystemAllocationScope { VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1, VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4, - VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, - VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE, - VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1), VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF } VkSystemAllocationScope; typedef enum VkInternalAllocationType { VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0, - VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, - VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, - VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = (VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1), VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF } VkInternalAllocationType; @@ -941,6 +1071,8 @@ typedef enum VkFormat { VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT = 1000066011, VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT = 1000066012, VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT = 1000066013, + VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT = 1000340000, + VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = 1000340001, VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM, VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM, VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, @@ -975,41 +1107,29 @@ typedef enum VkFormat { VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, - VK_FORMAT_BEGIN_RANGE = VK_FORMAT_UNDEFINED, - VK_FORMAT_END_RANGE = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, - VK_FORMAT_RANGE_SIZE = (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1), VK_FORMAT_MAX_ENUM = 0x7FFFFFFF } VkFormat; -typedef enum 
VkImageType { - VK_IMAGE_TYPE_1D = 0, - VK_IMAGE_TYPE_2D = 1, - VK_IMAGE_TYPE_3D = 2, - VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D, - VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D, - VK_IMAGE_TYPE_RANGE_SIZE = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1), - VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkImageType; - typedef enum VkImageTiling { VK_IMAGE_TILING_OPTIMAL = 0, VK_IMAGE_TILING_LINEAR = 1, VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT = 1000158000, - VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_OPTIMAL, - VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_LINEAR, - VK_IMAGE_TILING_RANGE_SIZE = (VK_IMAGE_TILING_LINEAR - VK_IMAGE_TILING_OPTIMAL + 1), VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF } VkImageTiling; +typedef enum VkImageType { + VK_IMAGE_TYPE_1D = 0, + VK_IMAGE_TYPE_2D = 1, + VK_IMAGE_TYPE_3D = 2, + VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageType; + typedef enum VkPhysicalDeviceType { VK_PHYSICAL_DEVICE_TYPE_OTHER = 0, VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1, VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2, VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3, VK_PHYSICAL_DEVICE_TYPE_CPU = 4, - VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE = VK_PHYSICAL_DEVICE_TYPE_OTHER, - VK_PHYSICAL_DEVICE_TYPE_END_RANGE = VK_PHYSICAL_DEVICE_TYPE_CPU, - VK_PHYSICAL_DEVICE_TYPE_RANGE_SIZE = (VK_PHYSICAL_DEVICE_TYPE_CPU - VK_PHYSICAL_DEVICE_TYPE_OTHER + 1), VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF } VkPhysicalDeviceType; @@ -1019,69 +1139,19 @@ typedef enum VkQueryType { VK_QUERY_TYPE_TIMESTAMP = 2, VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004, VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000, - VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000165000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150000, VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000, - VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION, - VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP, - VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1), + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF } VkQueryType; typedef enum VkSharingMode { VK_SHARING_MODE_EXCLUSIVE = 0, VK_SHARING_MODE_CONCURRENT = 1, - VK_SHARING_MODE_BEGIN_RANGE = VK_SHARING_MODE_EXCLUSIVE, - VK_SHARING_MODE_END_RANGE = VK_SHARING_MODE_CONCURRENT, - VK_SHARING_MODE_RANGE_SIZE = (VK_SHARING_MODE_CONCURRENT - VK_SHARING_MODE_EXCLUSIVE + 1), VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF } VkSharingMode; -typedef enum VkImageLayout { - VK_IMAGE_LAYOUT_UNDEFINED = 0, - VK_IMAGE_LAYOUT_GENERAL = 1, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, - VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, - VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, - VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, - VK_IMAGE_LAYOUT_PREINITIALIZED = 8, - VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000, - VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001, - VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL = 1000241000, - VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL = 1000241001, - VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002, - VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003, - VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, - VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, - VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = 1000164003, - 
VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000, - VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, - VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, - VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, - VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, - VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, - VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, - VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED, - VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED, - VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1), - VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF -} VkImageLayout; - -typedef enum VkImageViewType { - VK_IMAGE_VIEW_TYPE_1D = 0, - VK_IMAGE_VIEW_TYPE_2D = 1, - VK_IMAGE_VIEW_TYPE_3D = 2, - VK_IMAGE_VIEW_TYPE_CUBE = 3, - VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, - VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, - VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, - VK_IMAGE_VIEW_TYPE_BEGIN_RANGE = VK_IMAGE_VIEW_TYPE_1D, - VK_IMAGE_VIEW_TYPE_END_RANGE = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, - VK_IMAGE_VIEW_TYPE_RANGE_SIZE = (VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - VK_IMAGE_VIEW_TYPE_1D + 1), - VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkImageViewType; - typedef enum VkComponentSwizzle { VK_COMPONENT_SWIZZLE_IDENTITY = 0, VK_COMPONENT_SWIZZLE_ZERO = 1, @@ -1090,111 +1160,19 @@ typedef enum VkComponentSwizzle { VK_COMPONENT_SWIZZLE_G = 4, VK_COMPONENT_SWIZZLE_B = 5, VK_COMPONENT_SWIZZLE_A = 6, - VK_COMPONENT_SWIZZLE_BEGIN_RANGE = VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_END_RANGE = VK_COMPONENT_SWIZZLE_A, - VK_COMPONENT_SWIZZLE_RANGE_SIZE = (VK_COMPONENT_SWIZZLE_A - VK_COMPONENT_SWIZZLE_IDENTITY + 1), VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF } VkComponentSwizzle; -typedef enum VkVertexInputRate { - VK_VERTEX_INPUT_RATE_VERTEX = 0, - VK_VERTEX_INPUT_RATE_INSTANCE = 1, - VK_VERTEX_INPUT_RATE_BEGIN_RANGE = VK_VERTEX_INPUT_RATE_VERTEX, - VK_VERTEX_INPUT_RATE_END_RANGE = VK_VERTEX_INPUT_RATE_INSTANCE, - VK_VERTEX_INPUT_RATE_RANGE_SIZE = (VK_VERTEX_INPUT_RATE_INSTANCE - VK_VERTEX_INPUT_RATE_VERTEX + 1), - VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF -} VkVertexInputRate; - -typedef enum VkPrimitiveTopology { - VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, - VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, - VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, - VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, - VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, - VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, - VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, - VK_PRIMITIVE_TOPOLOGY_END_RANGE = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, - VK_PRIMITIVE_TOPOLOGY_RANGE_SIZE = (VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - VK_PRIMITIVE_TOPOLOGY_POINT_LIST + 1), - VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF -} VkPrimitiveTopology; - -typedef enum VkPolygonMode { - VK_POLYGON_MODE_FILL = 0, - VK_POLYGON_MODE_LINE = 1, - VK_POLYGON_MODE_POINT = 2, - VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, - VK_POLYGON_MODE_BEGIN_RANGE = VK_POLYGON_MODE_FILL, - 
VK_POLYGON_MODE_END_RANGE = VK_POLYGON_MODE_POINT, - VK_POLYGON_MODE_RANGE_SIZE = (VK_POLYGON_MODE_POINT - VK_POLYGON_MODE_FILL + 1), - VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF -} VkPolygonMode; - -typedef enum VkFrontFace { - VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, - VK_FRONT_FACE_CLOCKWISE = 1, - VK_FRONT_FACE_BEGIN_RANGE = VK_FRONT_FACE_COUNTER_CLOCKWISE, - VK_FRONT_FACE_END_RANGE = VK_FRONT_FACE_CLOCKWISE, - VK_FRONT_FACE_RANGE_SIZE = (VK_FRONT_FACE_CLOCKWISE - VK_FRONT_FACE_COUNTER_CLOCKWISE + 1), - VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF -} VkFrontFace; - -typedef enum VkCompareOp { - VK_COMPARE_OP_NEVER = 0, - VK_COMPARE_OP_LESS = 1, - VK_COMPARE_OP_EQUAL = 2, - VK_COMPARE_OP_LESS_OR_EQUAL = 3, - VK_COMPARE_OP_GREATER = 4, - VK_COMPARE_OP_NOT_EQUAL = 5, - VK_COMPARE_OP_GREATER_OR_EQUAL = 6, - VK_COMPARE_OP_ALWAYS = 7, - VK_COMPARE_OP_BEGIN_RANGE = VK_COMPARE_OP_NEVER, - VK_COMPARE_OP_END_RANGE = VK_COMPARE_OP_ALWAYS, - VK_COMPARE_OP_RANGE_SIZE = (VK_COMPARE_OP_ALWAYS - VK_COMPARE_OP_NEVER + 1), - VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF -} VkCompareOp; - -typedef enum VkStencilOp { - VK_STENCIL_OP_KEEP = 0, - VK_STENCIL_OP_ZERO = 1, - VK_STENCIL_OP_REPLACE = 2, - VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, - VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, - VK_STENCIL_OP_INVERT = 5, - VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, - VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, - VK_STENCIL_OP_BEGIN_RANGE = VK_STENCIL_OP_KEEP, - VK_STENCIL_OP_END_RANGE = VK_STENCIL_OP_DECREMENT_AND_WRAP, - VK_STENCIL_OP_RANGE_SIZE = (VK_STENCIL_OP_DECREMENT_AND_WRAP - VK_STENCIL_OP_KEEP + 1), - VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF -} VkStencilOp; - -typedef enum VkLogicOp { - VK_LOGIC_OP_CLEAR = 0, - VK_LOGIC_OP_AND = 1, - VK_LOGIC_OP_AND_REVERSE = 2, - VK_LOGIC_OP_COPY = 3, - VK_LOGIC_OP_AND_INVERTED = 4, - VK_LOGIC_OP_NO_OP = 5, - VK_LOGIC_OP_XOR = 6, - VK_LOGIC_OP_OR = 7, - VK_LOGIC_OP_NOR = 8, - VK_LOGIC_OP_EQUIVALENT = 9, - VK_LOGIC_OP_INVERT = 10, - VK_LOGIC_OP_OR_REVERSE = 11, - VK_LOGIC_OP_COPY_INVERTED = 12, - VK_LOGIC_OP_OR_INVERTED = 13, - VK_LOGIC_OP_NAND = 14, - VK_LOGIC_OP_SET = 15, - VK_LOGIC_OP_BEGIN_RANGE = VK_LOGIC_OP_CLEAR, - VK_LOGIC_OP_END_RANGE = VK_LOGIC_OP_SET, - VK_LOGIC_OP_RANGE_SIZE = (VK_LOGIC_OP_SET - VK_LOGIC_OP_CLEAR + 1), - VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF -} VkLogicOp; +typedef enum VkImageViewType { + VK_IMAGE_VIEW_TYPE_1D = 0, + VK_IMAGE_VIEW_TYPE_2D = 1, + VK_IMAGE_VIEW_TYPE_3D = 2, + VK_IMAGE_VIEW_TYPE_CUBE = 3, + VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, + VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, + VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, + VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageViewType; typedef enum VkBlendFactor { VK_BLEND_FACTOR_ZERO = 0, @@ -1216,9 +1194,6 @@ typedef enum VkBlendFactor { VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16, VK_BLEND_FACTOR_SRC1_ALPHA = 17, VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18, - VK_BLEND_FACTOR_BEGIN_RANGE = VK_BLEND_FACTOR_ZERO, - VK_BLEND_FACTOR_END_RANGE = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA, - VK_BLEND_FACTOR_RANGE_SIZE = (VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - VK_BLEND_FACTOR_ZERO + 1), VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF } VkBlendFactor; @@ -1274,12 +1249,21 @@ typedef enum VkBlendOp { VK_BLEND_OP_RED_EXT = 1000148043, VK_BLEND_OP_GREEN_EXT = 1000148044, VK_BLEND_OP_BLUE_EXT = 1000148045, - VK_BLEND_OP_BEGIN_RANGE = VK_BLEND_OP_ADD, - VK_BLEND_OP_END_RANGE = VK_BLEND_OP_MAX, - VK_BLEND_OP_RANGE_SIZE = (VK_BLEND_OP_MAX - VK_BLEND_OP_ADD + 1), VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF } VkBlendOp; +typedef enum VkCompareOp { + VK_COMPARE_OP_NEVER = 0, + VK_COMPARE_OP_LESS = 1, + 
VK_COMPARE_OP_EQUAL = 2, + VK_COMPARE_OP_LESS_OR_EQUAL = 3, + VK_COMPARE_OP_GREATER = 4, + VK_COMPARE_OP_NOT_EQUAL = 5, + VK_COMPARE_OP_GREATER_OR_EQUAL = 6, + VK_COMPARE_OP_ALWAYS = 7, + VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF +} VkCompareOp; + typedef enum VkDynamicState { VK_DYNAMIC_STATE_VIEWPORT = 0, VK_DYNAMIC_STATE_SCISSOR = 1, @@ -1297,44 +1281,87 @@ typedef enum VkDynamicState { VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006, VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001, VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = 1000259000, - VK_DYNAMIC_STATE_BEGIN_RANGE = VK_DYNAMIC_STATE_VIEWPORT, - VK_DYNAMIC_STATE_END_RANGE = VK_DYNAMIC_STATE_STENCIL_REFERENCE, - VK_DYNAMIC_STATE_RANGE_SIZE = (VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1), + VK_DYNAMIC_STATE_CULL_MODE_EXT = 1000267000, + VK_DYNAMIC_STATE_FRONT_FACE_EXT = 1000267001, + VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = 1000267002, + VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT = 1000267003, + VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT = 1000267004, + VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT = 1000267005, + VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT = 1000267006, + VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT = 1000267007, + VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT = 1000267008, + VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT = 1000267009, + VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT = 1000267010, + VK_DYNAMIC_STATE_STENCIL_OP_EXT = 1000267011, VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF } VkDynamicState; -typedef enum VkFilter { - VK_FILTER_NEAREST = 0, - VK_FILTER_LINEAR = 1, - VK_FILTER_CUBIC_IMG = 1000015000, - VK_FILTER_CUBIC_EXT = VK_FILTER_CUBIC_IMG, - VK_FILTER_BEGIN_RANGE = VK_FILTER_NEAREST, - VK_FILTER_END_RANGE = VK_FILTER_LINEAR, - VK_FILTER_RANGE_SIZE = (VK_FILTER_LINEAR - VK_FILTER_NEAREST + 1), - VK_FILTER_MAX_ENUM = 0x7FFFFFFF -} VkFilter; +typedef enum VkFrontFace { + VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, + VK_FRONT_FACE_CLOCKWISE = 1, + VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF +} VkFrontFace; -typedef enum VkSamplerMipmapMode { - VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, - VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, - VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE = VK_SAMPLER_MIPMAP_MODE_NEAREST, - VK_SAMPLER_MIPMAP_MODE_END_RANGE = VK_SAMPLER_MIPMAP_MODE_LINEAR, - VK_SAMPLER_MIPMAP_MODE_RANGE_SIZE = (VK_SAMPLER_MIPMAP_MODE_LINEAR - VK_SAMPLER_MIPMAP_MODE_NEAREST + 1), - VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF -} VkSamplerMipmapMode; +typedef enum VkVertexInputRate { + VK_VERTEX_INPUT_RATE_VERTEX = 0, + VK_VERTEX_INPUT_RATE_INSTANCE = 1, + VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF +} VkVertexInputRate; -typedef enum VkSamplerAddressMode { - VK_SAMPLER_ADDRESS_MODE_REPEAT = 0, - VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1, - VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, - VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, - VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, - VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, - VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE = VK_SAMPLER_ADDRESS_MODE_REPEAT, - VK_SAMPLER_ADDRESS_MODE_END_RANGE = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, - VK_SAMPLER_ADDRESS_MODE_RANGE_SIZE = (VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER - VK_SAMPLER_ADDRESS_MODE_REPEAT + 1), - VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF -} VkSamplerAddressMode; +typedef enum VkPrimitiveTopology { + VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP 
= 4, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, + VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, + VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF +} VkPrimitiveTopology; + +typedef enum VkPolygonMode { + VK_POLYGON_MODE_FILL = 0, + VK_POLYGON_MODE_LINE = 1, + VK_POLYGON_MODE_POINT = 2, + VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, + VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF +} VkPolygonMode; + +typedef enum VkStencilOp { + VK_STENCIL_OP_KEEP = 0, + VK_STENCIL_OP_ZERO = 1, + VK_STENCIL_OP_REPLACE = 2, + VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, + VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, + VK_STENCIL_OP_INVERT = 5, + VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, + VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, + VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF +} VkStencilOp; + +typedef enum VkLogicOp { + VK_LOGIC_OP_CLEAR = 0, + VK_LOGIC_OP_AND = 1, + VK_LOGIC_OP_AND_REVERSE = 2, + VK_LOGIC_OP_COPY = 3, + VK_LOGIC_OP_AND_INVERTED = 4, + VK_LOGIC_OP_NO_OP = 5, + VK_LOGIC_OP_XOR = 6, + VK_LOGIC_OP_OR = 7, + VK_LOGIC_OP_NOR = 8, + VK_LOGIC_OP_EQUIVALENT = 9, + VK_LOGIC_OP_INVERT = 10, + VK_LOGIC_OP_OR_REVERSE = 11, + VK_LOGIC_OP_COPY_INVERTED = 12, + VK_LOGIC_OP_OR_INVERTED = 13, + VK_LOGIC_OP_NAND = 14, + VK_LOGIC_OP_SET = 15, + VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF +} VkLogicOp; typedef enum VkBorderColor { VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0, @@ -1343,12 +1370,35 @@ typedef enum VkBorderColor { VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3, VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4, VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5, - VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, - VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE, - VK_BORDER_COLOR_RANGE_SIZE = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1), + VK_BORDER_COLOR_FLOAT_CUSTOM_EXT = 1000287003, + VK_BORDER_COLOR_INT_CUSTOM_EXT = 1000287004, VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF } VkBorderColor; +typedef enum VkFilter { + VK_FILTER_NEAREST = 0, + VK_FILTER_LINEAR = 1, + VK_FILTER_CUBIC_IMG = 1000015000, + VK_FILTER_CUBIC_EXT = VK_FILTER_CUBIC_IMG, + VK_FILTER_MAX_ENUM = 0x7FFFFFFF +} VkFilter; + +typedef enum VkSamplerAddressMode { + VK_SAMPLER_ADDRESS_MODE_REPEAT = 0, + VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, + VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, + VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, + VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerAddressMode; + +typedef enum VkSamplerMipmapMode { + VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, + VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, + VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerMipmapMode; + typedef enum VkDescriptorType { VK_DESCRIPTOR_TYPE_SAMPLER = 0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1, @@ -1362,10 +1412,8 @@ typedef enum VkDescriptorType { VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = 1000138000, - VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, - VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER, - VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, - VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 
1), + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000165000, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF } VkDescriptorType; @@ -1373,118 +1421,98 @@ typedef enum VkAttachmentLoadOp { VK_ATTACHMENT_LOAD_OP_LOAD = 0, VK_ATTACHMENT_LOAD_OP_CLEAR = 1, VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, - VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE = VK_ATTACHMENT_LOAD_OP_LOAD, - VK_ATTACHMENT_LOAD_OP_END_RANGE = VK_ATTACHMENT_LOAD_OP_DONT_CARE, - VK_ATTACHMENT_LOAD_OP_RANGE_SIZE = (VK_ATTACHMENT_LOAD_OP_DONT_CARE - VK_ATTACHMENT_LOAD_OP_LOAD + 1), VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF } VkAttachmentLoadOp; typedef enum VkAttachmentStoreOp { VK_ATTACHMENT_STORE_OP_STORE = 0, VK_ATTACHMENT_STORE_OP_DONT_CARE = 1, - VK_ATTACHMENT_STORE_OP_BEGIN_RANGE = VK_ATTACHMENT_STORE_OP_STORE, - VK_ATTACHMENT_STORE_OP_END_RANGE = VK_ATTACHMENT_STORE_OP_DONT_CARE, - VK_ATTACHMENT_STORE_OP_RANGE_SIZE = (VK_ATTACHMENT_STORE_OP_DONT_CARE - VK_ATTACHMENT_STORE_OP_STORE + 1), + VK_ATTACHMENT_STORE_OP_NONE_QCOM = 1000301000, VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF } VkAttachmentStoreOp; typedef enum VkPipelineBindPoint { VK_PIPELINE_BIND_POINT_GRAPHICS = 0, VK_PIPELINE_BIND_POINT_COMPUTE = 1, - VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = 1000165000, - VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS, - VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE, - VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1), + VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000, + VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF } VkPipelineBindPoint; typedef enum VkCommandBufferLevel { VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0, VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1, - VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE = VK_COMMAND_BUFFER_LEVEL_PRIMARY, - VK_COMMAND_BUFFER_LEVEL_END_RANGE = VK_COMMAND_BUFFER_LEVEL_SECONDARY, - VK_COMMAND_BUFFER_LEVEL_RANGE_SIZE = (VK_COMMAND_BUFFER_LEVEL_SECONDARY - VK_COMMAND_BUFFER_LEVEL_PRIMARY + 1), VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF } VkCommandBufferLevel; typedef enum VkIndexType { VK_INDEX_TYPE_UINT16 = 0, VK_INDEX_TYPE_UINT32 = 1, - VK_INDEX_TYPE_NONE_NV = 1000165000, + VK_INDEX_TYPE_NONE_KHR = 1000165000, VK_INDEX_TYPE_UINT8_EXT = 1000265000, - VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16, - VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32, - VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1), + VK_INDEX_TYPE_NONE_NV = VK_INDEX_TYPE_NONE_KHR, VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF } VkIndexType; typedef enum VkSubpassContents { VK_SUBPASS_CONTENTS_INLINE = 0, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, - VK_SUBPASS_CONTENTS_BEGIN_RANGE = VK_SUBPASS_CONTENTS_INLINE, - VK_SUBPASS_CONTENTS_END_RANGE = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, - VK_SUBPASS_CONTENTS_RANGE_SIZE = (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - VK_SUBPASS_CONTENTS_INLINE + 1), VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF } VkSubpassContents; -typedef enum VkObjectType { - VK_OBJECT_TYPE_UNKNOWN = 0, - VK_OBJECT_TYPE_INSTANCE = 1, - VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, - VK_OBJECT_TYPE_DEVICE = 3, - VK_OBJECT_TYPE_QUEUE = 4, - VK_OBJECT_TYPE_SEMAPHORE = 5, - VK_OBJECT_TYPE_COMMAND_BUFFER = 6, - VK_OBJECT_TYPE_FENCE = 7, - VK_OBJECT_TYPE_DEVICE_MEMORY = 8, - VK_OBJECT_TYPE_BUFFER = 9, - VK_OBJECT_TYPE_IMAGE = 10, - VK_OBJECT_TYPE_EVENT = 11, - 
VK_OBJECT_TYPE_QUERY_POOL = 12, - VK_OBJECT_TYPE_BUFFER_VIEW = 13, - VK_OBJECT_TYPE_IMAGE_VIEW = 14, - VK_OBJECT_TYPE_SHADER_MODULE = 15, - VK_OBJECT_TYPE_PIPELINE_CACHE = 16, - VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, - VK_OBJECT_TYPE_RENDER_PASS = 18, - VK_OBJECT_TYPE_PIPELINE = 19, - VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, - VK_OBJECT_TYPE_SAMPLER = 21, - VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, - VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, - VK_OBJECT_TYPE_FRAMEBUFFER = 24, - VK_OBJECT_TYPE_COMMAND_POOL = 25, - VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000, - VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000, - VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, - VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, - VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, - VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, - VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, - VK_OBJECT_TYPE_OBJECT_TABLE_NVX = 1000086000, - VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX = 1000086001, - VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, - VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, - VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, - VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000, - VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, - VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, - VK_OBJECT_TYPE_BEGIN_RANGE = VK_OBJECT_TYPE_UNKNOWN, - VK_OBJECT_TYPE_END_RANGE = VK_OBJECT_TYPE_COMMAND_POOL, - VK_OBJECT_TYPE_RANGE_SIZE = (VK_OBJECT_TYPE_COMMAND_POOL - VK_OBJECT_TYPE_UNKNOWN + 1), - VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkObjectType; +typedef enum VkAccessFlagBits { + VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, + VK_ACCESS_INDEX_READ_BIT = 0x00000002, + VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, + VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, + VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, + VK_ACCESS_SHADER_READ_BIT = 0x00000020, + VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, + VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, + VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, + VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, + VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, + VK_ACCESS_HOST_READ_BIT = 0x00002000, + VK_ACCESS_HOST_WRITE_BIT = 0x00004000, + VK_ACCESS_MEMORY_READ_BIT = 0x00008000, + VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, + VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000, + VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000, + VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000, + VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000, + VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000, + VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000, + VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000, + VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000, + VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAccessFlagBits; +typedef VkFlags VkAccessFlags; -typedef enum VkVendorId { - VK_VENDOR_ID_VIV = 0x10001, - VK_VENDOR_ID_VSI = 0x10002, - VK_VENDOR_ID_KAZAN = 
0x10003, - VK_VENDOR_ID_BEGIN_RANGE = VK_VENDOR_ID_VIV, - VK_VENDOR_ID_END_RANGE = VK_VENDOR_ID_KAZAN, - VK_VENDOR_ID_RANGE_SIZE = (VK_VENDOR_ID_KAZAN - VK_VENDOR_ID_VIV + 1), - VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF -} VkVendorId; -typedef VkFlags VkInstanceCreateFlags; +typedef enum VkImageAspectFlagBits { + VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, + VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, + VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, + VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, + VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, + VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, + VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, + VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080, + VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100, + VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200, + VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400, + VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT, + VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT, + VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT, + VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageAspectFlagBits; +typedef VkFlags VkImageAspectFlags; typedef enum VkFormatFeatureFlagBits { VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001, @@ -1511,6 +1539,7 @@ typedef enum VkFormatFeatureFlagBits { VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000, + VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000, VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, @@ -1527,21 +1556,6 @@ typedef enum VkFormatFeatureFlagBits { } VkFormatFeatureFlagBits; typedef VkFlags VkFormatFeatureFlags; -typedef enum VkImageUsageFlagBits { - VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, - VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, - VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, - VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, - VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, - VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, - VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, - VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, - VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00000100, - VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200, - VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageUsageFlagBits; -typedef VkFlags VkImageUsageFlags; - typedef enum VkImageCreateFlagBits { VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, @@ -1580,15 +1594,29 @@ typedef enum VkSampleCountFlagBits { } VkSampleCountFlagBits; typedef VkFlags VkSampleCountFlags; -typedef enum VkQueueFlagBits { - VK_QUEUE_GRAPHICS_BIT = 0x00000001, - VK_QUEUE_COMPUTE_BIT = 0x00000002, - VK_QUEUE_TRANSFER_BIT = 0x00000004, - VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, - VK_QUEUE_PROTECTED_BIT = 0x00000010, - VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueueFlagBits; -typedef VkFlags VkQueueFlags; +typedef enum VkImageUsageFlagBits { + VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, + VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, + VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, + VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, + VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, + 
VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, + VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00000100, + VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200, + VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageUsageFlagBits; +typedef VkFlags VkImageUsageFlags; +typedef VkFlags VkInstanceCreateFlags; + +typedef enum VkMemoryHeapFlagBits { + VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryHeapFlagBits; +typedef VkFlags VkMemoryHeapFlags; typedef enum VkMemoryPropertyFlagBits { VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, @@ -1603,13 +1631,15 @@ typedef enum VkMemoryPropertyFlagBits { } VkMemoryPropertyFlagBits; typedef VkFlags VkMemoryPropertyFlags; -typedef enum VkMemoryHeapFlagBits { - VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, - VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002, - VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, - VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkMemoryHeapFlagBits; -typedef VkFlags VkMemoryHeapFlags; +typedef enum VkQueueFlagBits { + VK_QUEUE_GRAPHICS_BIT = 0x00000001, + VK_QUEUE_COMPUTE_BIT = 0x00000002, + VK_QUEUE_TRANSFER_BIT = 0x00000004, + VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, + VK_QUEUE_PROTECTED_BIT = 0x00000010, + VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueueFlagBits; +typedef VkFlags VkQueueFlags; typedef VkFlags VkDeviceCreateFlags; typedef enum VkDeviceQueueCreateFlagBits { @@ -1638,36 +1668,25 @@ typedef enum VkPipelineStageFlagBits { VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000, VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000, - VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000, VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00400000, - VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = 0x00200000, - VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = 0x00080000, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = 0x00100000, VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000, + VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkPipelineStageFlagBits; typedef VkFlags VkPipelineStageFlags; typedef VkFlags VkMemoryMapFlags; -typedef enum VkImageAspectFlagBits { - VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, - VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, - VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, - VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, - VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, - VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, - VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, - VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080, - VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100, - VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200, - VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400, - VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT, - VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT, - VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT, - 
VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageAspectFlagBits; -typedef VkFlags VkImageAspectFlags; +typedef enum VkSparseMemoryBindFlagBits { + VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, + VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseMemoryBindFlagBits; +typedef VkFlags VkSparseMemoryBindFlags; typedef enum VkSparseImageFormatFlagBits { VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, @@ -1677,12 +1696,6 @@ typedef enum VkSparseImageFormatFlagBits { } VkSparseImageFormatFlagBits; typedef VkFlags VkSparseImageFormatFlags; -typedef enum VkSparseMemoryBindFlagBits { - VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, - VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSparseMemoryBindFlagBits; -typedef VkFlags VkSparseMemoryBindFlags; - typedef enum VkFenceCreateFlagBits { VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -1690,7 +1703,6 @@ typedef enum VkFenceCreateFlagBits { typedef VkFlags VkFenceCreateFlags; typedef VkFlags VkSemaphoreCreateFlags; typedef VkFlags VkEventCreateFlags; -typedef VkFlags VkQueryPoolCreateFlags; typedef enum VkQueryPipelineStatisticFlagBits { VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, @@ -1707,6 +1719,7 @@ typedef enum VkQueryPipelineStatisticFlagBits { VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkQueryPipelineStatisticFlagBits; typedef VkFlags VkQueryPipelineStatisticFlags; +typedef VkFlags VkQueryPoolCreateFlags; typedef enum VkQueryResultFlagBits { VK_QUERY_RESULT_64_BIT = 0x00000001, @@ -1743,7 +1756,8 @@ typedef enum VkBufferUsageFlagBits { VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800, VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000, VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200, - VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = 0x00000400, + VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR = 0x00000400, + VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -1753,6 +1767,7 @@ typedef VkFlags VkBufferViewCreateFlags; typedef enum VkImageViewCreateFlagBits { VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT = 0x00000001, + VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT = 0x00000002, VK_IMAGE_VIEW_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkImageViewCreateFlagBits; typedef VkFlags VkImageViewCreateFlags; @@ -1761,17 +1776,41 @@ typedef enum VkShaderModuleCreateFlagBits { VK_SHADER_MODULE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkShaderModuleCreateFlagBits; typedef VkFlags VkShaderModuleCreateFlags; + +typedef enum VkPipelineCacheCreateFlagBits { + VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT = 0x00000001, + VK_PIPELINE_CACHE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheCreateFlagBits; typedef VkFlags VkPipelineCacheCreateFlags; +typedef enum VkColorComponentFlagBits { + VK_COLOR_COMPONENT_R_BIT = 0x00000001, + VK_COLOR_COMPONENT_G_BIT = 0x00000002, + VK_COLOR_COMPONENT_B_BIT = 0x00000004, + VK_COLOR_COMPONENT_A_BIT = 0x00000008, + VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkColorComponentFlagBits; +typedef VkFlags VkColorComponentFlags; + typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 
0x00000002, VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008, VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000, VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040, VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080, + VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000, + VK_PIPELINE_CREATE_LIBRARY_BIT_KHR = 0x00000800, + VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = 0x00000100, + VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = 0x00000200, VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, @@ -1795,21 +1834,22 @@ typedef enum VkShaderStageFlagBits { VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, VK_SHADER_STAGE_ALL = 0x7FFFFFFF, - VK_SHADER_STAGE_RAYGEN_BIT_NV = 0x00000100, - VK_SHADER_STAGE_ANY_HIT_BIT_NV = 0x00000200, - VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = 0x00000400, - VK_SHADER_STAGE_MISS_BIT_NV = 0x00000800, - VK_SHADER_STAGE_INTERSECTION_BIT_NV = 0x00001000, - VK_SHADER_STAGE_CALLABLE_BIT_NV = 0x00002000, + VK_SHADER_STAGE_RAYGEN_BIT_KHR = 0x00000100, + VK_SHADER_STAGE_ANY_HIT_BIT_KHR = 0x00000200, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR = 0x00000400, + VK_SHADER_STAGE_MISS_BIT_KHR = 0x00000800, + VK_SHADER_STAGE_INTERSECTION_BIT_KHR = 0x00001000, + VK_SHADER_STAGE_CALLABLE_BIT_KHR = 0x00002000, VK_SHADER_STAGE_TASK_BIT_NV = 0x00000040, VK_SHADER_STAGE_MESH_BIT_NV = 0x00000080, + VK_SHADER_STAGE_RAYGEN_BIT_NV = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + VK_SHADER_STAGE_ANY_HIT_BIT_NV = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + VK_SHADER_STAGE_MISS_BIT_NV = VK_SHADER_STAGE_MISS_BIT_KHR, + VK_SHADER_STAGE_INTERSECTION_BIT_NV = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + VK_SHADER_STAGE_CALLABLE_BIT_NV = VK_SHADER_STAGE_CALLABLE_BIT_KHR, VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkShaderStageFlagBits; -typedef VkFlags VkPipelineVertexInputStateCreateFlags; -typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; -typedef VkFlags VkPipelineTessellationStateCreateFlags; -typedef VkFlags VkPipelineViewportStateCreateFlags; -typedef VkFlags VkPipelineRasterizationStateCreateFlags; typedef enum VkCullModeFlagBits { VK_CULL_MODE_NONE = 0, @@ -1819,18 +1859,14 @@ typedef enum VkCullModeFlagBits { VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkCullModeFlagBits; typedef VkFlags VkCullModeFlags; +typedef VkFlags VkPipelineVertexInputStateCreateFlags; +typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; +typedef VkFlags VkPipelineTessellationStateCreateFlags; +typedef VkFlags VkPipelineViewportStateCreateFlags; +typedef VkFlags VkPipelineRasterizationStateCreateFlags; typedef VkFlags VkPipelineMultisampleStateCreateFlags; typedef VkFlags 
VkPipelineDepthStencilStateCreateFlags; typedef VkFlags VkPipelineColorBlendStateCreateFlags; - -typedef enum VkColorComponentFlagBits { - VK_COLOR_COMPONENT_R_BIT = 0x00000001, - VK_COLOR_COMPONENT_G_BIT = 0x00000002, - VK_COLOR_COMPONENT_B_BIT = 0x00000004, - VK_COLOR_COMPONENT_A_BIT = 0x00000008, - VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkColorComponentFlagBits; -typedef VkFlags VkColorComponentFlags; typedef VkFlags VkPipelineDynamicStateCreateFlags; typedef VkFlags VkPipelineLayoutCreateFlags; typedef VkFlags VkShaderStageFlags; @@ -1842,14 +1878,6 @@ typedef enum VkSamplerCreateFlagBits { } VkSamplerCreateFlagBits; typedef VkFlags VkSamplerCreateFlags; -typedef enum VkDescriptorSetLayoutCreateFlagBits { - VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002, - VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, - VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, - VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkDescriptorSetLayoutCreateFlagBits; -typedef VkFlags VkDescriptorSetLayoutCreateFlags; - typedef enum VkDescriptorPoolCreateFlagBits { VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002, @@ -1859,17 +1887,13 @@ typedef enum VkDescriptorPoolCreateFlagBits { typedef VkFlags VkDescriptorPoolCreateFlags; typedef VkFlags VkDescriptorPoolResetFlags; -typedef enum VkFramebufferCreateFlagBits { - VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT = 0x00000001, - VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, - VK_FRAMEBUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkFramebufferCreateFlagBits; -typedef VkFlags VkFramebufferCreateFlags; - -typedef enum VkRenderPassCreateFlagBits { - VK_RENDER_PASS_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkRenderPassCreateFlagBits; -typedef VkFlags VkRenderPassCreateFlags; +typedef enum VkDescriptorSetLayoutCreateFlagBits { + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorSetLayoutCreateFlagBits; +typedef VkFlags VkDescriptorSetLayoutCreateFlags; typedef enum VkAttachmentDescriptionFlagBits { VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, @@ -1877,46 +1901,6 @@ typedef enum VkAttachmentDescriptionFlagBits { } VkAttachmentDescriptionFlagBits; typedef VkFlags VkAttachmentDescriptionFlags; -typedef enum VkSubpassDescriptionFlagBits { - VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, - VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, - VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSubpassDescriptionFlagBits; -typedef VkFlags VkSubpassDescriptionFlags; - -typedef enum VkAccessFlagBits { - VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, - VK_ACCESS_INDEX_READ_BIT = 0x00000002, - VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, - VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, - VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, - VK_ACCESS_SHADER_READ_BIT = 0x00000020, - VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, - VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 
0x00000200, - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, - VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, - VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, - VK_ACCESS_HOST_READ_BIT = 0x00002000, - VK_ACCESS_HOST_WRITE_BIT = 0x00004000, - VK_ACCESS_MEMORY_READ_BIT = 0x00008000, - VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, - VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000, - VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000, - VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000, - VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000, - VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000, - VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000, - VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, - VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000, - VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000, - VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000, - VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000, - VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkAccessFlagBits; -typedef VkFlags VkAccessFlags; - typedef enum VkDependencyFlagBits { VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004, @@ -1927,6 +1911,28 @@ typedef enum VkDependencyFlagBits { } VkDependencyFlagBits; typedef VkFlags VkDependencyFlags; +typedef enum VkFramebufferCreateFlagBits { + VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT = 0x00000001, + VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, + VK_FRAMEBUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFramebufferCreateFlagBits; +typedef VkFlags VkFramebufferCreateFlags; + +typedef enum VkRenderPassCreateFlagBits { + VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM = 0x00000002, + VK_RENDER_PASS_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkRenderPassCreateFlagBits; +typedef VkFlags VkRenderPassCreateFlags; + +typedef enum VkSubpassDescriptionFlagBits { + VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, + VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, + VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM = 0x00000004, + VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM = 0x00000008, + VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassDescriptionFlagBits; +typedef VkFlags VkSubpassDescriptionFlags; + typedef enum VkCommandPoolCreateFlagBits { VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002, @@ -1969,26 +1975,103 @@ typedef enum VkStencilFaceFlagBits { VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkStencilFaceFlagBits; typedef VkFlags VkStencilFaceFlags; -typedef struct VkApplicationInfo { +typedef struct VkExtent2D { + uint32_t width; + uint32_t height; +} VkExtent2D; + +typedef struct VkExtent3D { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkExtent3D; + +typedef struct VkOffset2D { + int32_t x; + int32_t y; +} VkOffset2D; + +typedef struct VkOffset3D { + int32_t x; + int32_t y; + int32_t z; +} VkOffset3D; + +typedef struct VkRect2D { + VkOffset2D offset; + VkExtent2D extent; +} VkRect2D; + +typedef struct VkBaseInStructure { + VkStructureType sType; + const struct VkBaseInStructure* pNext; +} VkBaseInStructure; + +typedef struct VkBaseOutStructure { + VkStructureType sType; + struct VkBaseOutStructure* pNext; +} VkBaseOutStructure; + +typedef struct VkBufferMemoryBarrier { VkStructureType sType; const void* pNext; - const char* pApplicationName; - uint32_t applicationVersion; - const char* pEngineName; - uint32_t 
engineVersion; - uint32_t apiVersion; -} VkApplicationInfo; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier; -typedef struct VkInstanceCreateInfo { - VkStructureType sType; - const void* pNext; - VkInstanceCreateFlags flags; - const VkApplicationInfo* pApplicationInfo; - uint32_t enabledLayerCount; - const char* const* ppEnabledLayerNames; - uint32_t enabledExtensionCount; - const char* const* ppEnabledExtensionNames; -} VkInstanceCreateInfo; +typedef struct VkDispatchIndirectCommand { + uint32_t x; + uint32_t y; + uint32_t z; +} VkDispatchIndirectCommand; + +typedef struct VkDrawIndexedIndirectCommand { + uint32_t indexCount; + uint32_t instanceCount; + uint32_t firstIndex; + int32_t vertexOffset; + uint32_t firstInstance; +} VkDrawIndexedIndirectCommand; + +typedef struct VkDrawIndirectCommand { + uint32_t vertexCount; + uint32_t instanceCount; + uint32_t firstVertex; + uint32_t firstInstance; +} VkDrawIndirectCommand; + +typedef struct VkImageSubresourceRange { + VkImageAspectFlags aspectMask; + uint32_t baseMipLevel; + uint32_t levelCount; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceRange; + +typedef struct VkImageMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier; + +typedef struct VkMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; +} VkMemoryBarrier; typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( void* pUserData, @@ -1996,13 +2079,6 @@ typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( size_t alignment, VkSystemAllocationScope allocationScope); -typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( - void* pUserData, - void* pOriginal, - size_t size, - size_t alignment, - VkSystemAllocationScope allocationScope); - typedef void (VKAPI_PTR *PFN_vkFreeFunction)( void* pUserData, void* pMemory); @@ -2019,6 +2095,14 @@ typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope); +typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( + void* pUserData, + void* pOriginal, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); typedef struct VkAllocationCallbacks { void* pUserData; PFN_vkAllocationFunction pfnAllocation; @@ -2028,6 +2112,51 @@ typedef struct VkAllocationCallbacks { PFN_vkInternalFreeNotification pfnInternalFree; } VkAllocationCallbacks; +typedef struct VkApplicationInfo { + VkStructureType sType; + const void* pNext; + const char* pApplicationName; + uint32_t applicationVersion; + const char* pEngineName; + uint32_t engineVersion; + uint32_t apiVersion; +} VkApplicationInfo; + +typedef struct VkFormatProperties { + VkFormatFeatureFlags linearTilingFeatures; + VkFormatFeatureFlags optimalTilingFeatures; + VkFormatFeatureFlags bufferFeatures; +} VkFormatProperties; + +typedef struct VkImageFormatProperties { + VkExtent3D maxExtent; + uint32_t maxMipLevels; + uint32_t maxArrayLayers; + VkSampleCountFlags sampleCounts; + VkDeviceSize maxResourceSize; +} VkImageFormatProperties; 
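/*
 * Editorial sketch, not part of the vendored header diff: a minimal example of
 * how the VkApplicationInfo / VkInstanceCreateInfo structs relocated above are
 * typically filled when creating an instance. The helper name
 * create_example_instance and the string literals are illustrative assumptions,
 * not anything defined by this header.
 */
#include <stddef.h>
#include <vulkan/vulkan.h>

static VkInstance create_example_instance(void) {
    VkApplicationInfo app_info = {0};
    app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    app_info.pApplicationName = "example-app";        /* assumed name */
    app_info.applicationVersion = VK_MAKE_VERSION(1, 0, 0);
    app_info.pEngineName = "example-engine";          /* assumed name */
    app_info.engineVersion = VK_MAKE_VERSION(1, 0, 0);
    app_info.apiVersion = VK_API_VERSION_1_2;         /* macro defined later in this header */

    VkInstanceCreateInfo create_info = {0};
    create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    create_info.pApplicationInfo = &app_info;
    /* no layers or extensions enabled in this sketch */

    VkInstance instance = VK_NULL_HANDLE;
    if (vkCreateInstance(&create_info, NULL, &instance) != VK_SUCCESS) {
        return VK_NULL_HANDLE;
    }
    return instance;
}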
+ +typedef struct VkInstanceCreateInfo { + VkStructureType sType; + const void* pNext; + VkInstanceCreateFlags flags; + const VkApplicationInfo* pApplicationInfo; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; +} VkInstanceCreateInfo; + +typedef struct VkMemoryHeap { + VkDeviceSize size; + VkMemoryHeapFlags flags; +} VkMemoryHeap; + +typedef struct VkMemoryType { + VkMemoryPropertyFlags propertyFlags; + uint32_t heapIndex; +} VkMemoryType; + typedef struct VkPhysicalDeviceFeatures { VkBool32 robustBufferAccess; VkBool32 fullDrawIndexUint32; @@ -2086,26 +2215,6 @@ typedef struct VkPhysicalDeviceFeatures { VkBool32 inheritedQueries; } VkPhysicalDeviceFeatures; -typedef struct VkFormatProperties { - VkFormatFeatureFlags linearTilingFeatures; - VkFormatFeatureFlags optimalTilingFeatures; - VkFormatFeatureFlags bufferFeatures; -} VkFormatProperties; - -typedef struct VkExtent3D { - uint32_t width; - uint32_t height; - uint32_t depth; -} VkExtent3D; - -typedef struct VkImageFormatProperties { - VkExtent3D maxExtent; - uint32_t maxMipLevels; - uint32_t maxArrayLayers; - VkSampleCountFlags sampleCounts; - VkDeviceSize maxResourceSize; -} VkImageFormatProperties; - typedef struct VkPhysicalDeviceLimits { uint32_t maxImageDimension1D; uint32_t maxImageDimension2D; @@ -2215,6 +2324,13 @@ typedef struct VkPhysicalDeviceLimits { VkDeviceSize nonCoherentAtomSize; } VkPhysicalDeviceLimits; +typedef struct VkPhysicalDeviceMemoryProperties { + uint32_t memoryTypeCount; + VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; + uint32_t memoryHeapCount; + VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryProperties; + typedef struct VkPhysicalDeviceSparseProperties { VkBool32 residencyStandard2DBlockShape; VkBool32 residencyStandard2DMultisampleBlockShape; @@ -2242,24 +2358,6 @@ typedef struct VkQueueFamilyProperties { VkExtent3D minImageTransferGranularity; } VkQueueFamilyProperties; -typedef struct VkMemoryType { - VkMemoryPropertyFlags propertyFlags; - uint32_t heapIndex; -} VkMemoryType; - -typedef struct VkMemoryHeap { - VkDeviceSize size; - VkMemoryHeapFlags flags; -} VkMemoryHeap; - -typedef struct VkPhysicalDeviceMemoryProperties { - uint32_t memoryTypeCount; - VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; - uint32_t memoryHeapCount; - VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; -} VkPhysicalDeviceMemoryProperties; - -typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); typedef struct VkDeviceQueueCreateInfo { VkStructureType sType; const void* pNext; @@ -2306,13 +2404,6 @@ typedef struct VkSubmitInfo { const VkSemaphore* pSignalSemaphores; } VkSubmitInfo; -typedef struct VkMemoryAllocateInfo { - VkStructureType sType; - const void* pNext; - VkDeviceSize allocationSize; - uint32_t memoryTypeIndex; -} VkMemoryAllocateInfo; - typedef struct VkMappedMemoryRange { VkStructureType sType; const void* pNext; @@ -2321,26 +2412,19 @@ typedef struct VkMappedMemoryRange { VkDeviceSize size; } VkMappedMemoryRange; +typedef struct VkMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeIndex; +} VkMemoryAllocateInfo; + typedef struct VkMemoryRequirements { VkDeviceSize size; VkDeviceSize alignment; uint32_t memoryTypeBits; } VkMemoryRequirements; -typedef struct VkSparseImageFormatProperties { - VkImageAspectFlags aspectMask; - VkExtent3D imageGranularity; - VkSparseImageFormatFlags flags; -} VkSparseImageFormatProperties; - 
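/*
 * Editorial sketch, not part of the vendored header diff: the usual
 * memory-type selection loop over VkPhysicalDeviceMemoryProperties, whose
 * definition was moved above. The returned index is what
 * VkMemoryAllocateInfo::memoryTypeIndex expects; the helper name
 * find_memory_type_index is an illustrative assumption.
 */
#include <stdint.h>
#include <vulkan/vulkan.h>

static uint32_t find_memory_type_index(VkPhysicalDevice physical_device,
                                       uint32_t memory_type_bits, /* VkMemoryRequirements::memoryTypeBits */
                                       VkMemoryPropertyFlags required) {
    VkPhysicalDeviceMemoryProperties props;
    vkGetPhysicalDeviceMemoryProperties(physical_device, &props);
    for (uint32_t i = 0; i < props.memoryTypeCount; i++) {
        int type_allowed = (memory_type_bits & (1u << i)) != 0;
        int has_flags = (props.memoryTypes[i].propertyFlags & required) == required;
        if (type_allowed && has_flags) {
            return i; /* usable as VkMemoryAllocateInfo::memoryTypeIndex */
        }
    }
    return UINT32_MAX; /* no compatible memory type found */
}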
-typedef struct VkSparseImageMemoryRequirements { - VkSparseImageFormatProperties formatProperties; - uint32_t imageMipTailFirstLod; - VkDeviceSize imageMipTailSize; - VkDeviceSize imageMipTailOffset; - VkDeviceSize imageMipTailStride; -} VkSparseImageMemoryRequirements; - typedef struct VkSparseMemoryBind { VkDeviceSize resourceOffset; VkDeviceSize size; @@ -2367,12 +2451,6 @@ typedef struct VkImageSubresource { uint32_t arrayLayer; } VkImageSubresource; -typedef struct VkOffset3D { - int32_t x; - int32_t y; - int32_t z; -} VkOffset3D; - typedef struct VkSparseImageMemoryBind { VkImageSubresource subresource; VkOffset3D offset; @@ -2403,6 +2481,20 @@ typedef struct VkBindSparseInfo { const VkSemaphore* pSignalSemaphores; } VkBindSparseInfo; +typedef struct VkSparseImageFormatProperties { + VkImageAspectFlags aspectMask; + VkExtent3D imageGranularity; + VkSparseImageFormatFlags flags; +} VkSparseImageFormatProperties; + +typedef struct VkSparseImageMemoryRequirements { + VkSparseImageFormatProperties formatProperties; + uint32_t imageMipTailFirstLod; + VkDeviceSize imageMipTailSize; + VkDeviceSize imageMipTailOffset; + VkDeviceSize imageMipTailStride; +} VkSparseImageMemoryRequirements; + typedef struct VkFenceCreateInfo { VkStructureType sType; const void* pNext; @@ -2484,14 +2576,6 @@ typedef struct VkComponentMapping { VkComponentSwizzle a; } VkComponentMapping; -typedef struct VkImageSubresourceRange { - VkImageAspectFlags aspectMask; - uint32_t baseMipLevel; - uint32_t levelCount; - uint32_t baseArrayLayer; - uint32_t layerCount; -} VkImageSubresourceRange; - typedef struct VkImageViewCreateInfo { VkStructureType sType; const void* pNext; @@ -2542,6 +2626,16 @@ typedef struct VkPipelineShaderStageCreateInfo { const VkSpecializationInfo* pSpecializationInfo; } VkPipelineShaderStageCreateInfo; +typedef struct VkComputePipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + VkPipelineShaderStageCreateInfo stage; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkComputePipelineCreateInfo; + typedef struct VkVertexInputBindingDescription { uint32_t binding; uint32_t stride; @@ -2589,21 +2683,6 @@ typedef struct VkViewport { float maxDepth; } VkViewport; -typedef struct VkOffset2D { - int32_t x; - int32_t y; -} VkOffset2D; - -typedef struct VkExtent2D { - uint32_t width; - uint32_t height; -} VkExtent2D; - -typedef struct VkRect2D { - VkOffset2D offset; - VkExtent2D extent; -} VkRect2D; - typedef struct VkPipelineViewportStateCreateInfo { VkStructureType sType; const void* pNext; @@ -2719,16 +2798,6 @@ typedef struct VkGraphicsPipelineCreateInfo { int32_t basePipelineIndex; } VkGraphicsPipelineCreateInfo; -typedef struct VkComputePipelineCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineCreateFlags flags; - VkPipelineShaderStageCreateInfo stage; - VkPipelineLayout layout; - VkPipeline basePipelineHandle; - int32_t basePipelineIndex; -} VkComputePipelineCreateInfo; - typedef struct VkPushConstantRange { VkShaderStageFlags stageFlags; uint32_t offset; @@ -2766,21 +2835,29 @@ typedef struct VkSamplerCreateInfo { VkBool32 unnormalizedCoordinates; } VkSamplerCreateInfo; -typedef struct VkDescriptorSetLayoutBinding { - uint32_t binding; - VkDescriptorType descriptorType; - uint32_t descriptorCount; - VkShaderStageFlags stageFlags; - const VkSampler* pImmutableSamplers; -} VkDescriptorSetLayoutBinding; +typedef struct VkCopyDescriptorSet { + VkStructureType sType; + const void* pNext; + 
VkDescriptorSet srcSet; + uint32_t srcBinding; + uint32_t srcArrayElement; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; +} VkCopyDescriptorSet; -typedef struct VkDescriptorSetLayoutCreateInfo { - VkStructureType sType; - const void* pNext; - VkDescriptorSetLayoutCreateFlags flags; - uint32_t bindingCount; - const VkDescriptorSetLayoutBinding* pBindings; -} VkDescriptorSetLayoutCreateInfo; +typedef struct VkDescriptorBufferInfo { + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize range; +} VkDescriptorBufferInfo; + +typedef struct VkDescriptorImageInfo { + VkSampler sampler; + VkImageView imageView; + VkImageLayout imageLayout; +} VkDescriptorImageInfo; typedef struct VkDescriptorPoolSize { VkDescriptorType type; @@ -2804,17 +2881,21 @@ typedef struct VkDescriptorSetAllocateInfo { const VkDescriptorSetLayout* pSetLayouts; } VkDescriptorSetAllocateInfo; -typedef struct VkDescriptorImageInfo { - VkSampler sampler; - VkImageView imageView; - VkImageLayout imageLayout; -} VkDescriptorImageInfo; +typedef struct VkDescriptorSetLayoutBinding { + uint32_t binding; + VkDescriptorType descriptorType; + uint32_t descriptorCount; + VkShaderStageFlags stageFlags; + const VkSampler* pImmutableSamplers; +} VkDescriptorSetLayoutBinding; -typedef struct VkDescriptorBufferInfo { - VkBuffer buffer; - VkDeviceSize offset; - VkDeviceSize range; -} VkDescriptorBufferInfo; +typedef struct VkDescriptorSetLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayoutCreateFlags flags; + uint32_t bindingCount; + const VkDescriptorSetLayoutBinding* pBindings; +} VkDescriptorSetLayoutCreateInfo; typedef struct VkWriteDescriptorSet { VkStructureType sType; @@ -2829,30 +2910,6 @@ typedef struct VkWriteDescriptorSet { const VkBufferView* pTexelBufferView; } VkWriteDescriptorSet; -typedef struct VkCopyDescriptorSet { - VkStructureType sType; - const void* pNext; - VkDescriptorSet srcSet; - uint32_t srcBinding; - uint32_t srcArrayElement; - VkDescriptorSet dstSet; - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; -} VkCopyDescriptorSet; - -typedef struct VkFramebufferCreateInfo { - VkStructureType sType; - const void* pNext; - VkFramebufferCreateFlags flags; - VkRenderPass renderPass; - uint32_t attachmentCount; - const VkImageView* pAttachments; - uint32_t width; - uint32_t height; - uint32_t layers; -} VkFramebufferCreateInfo; - typedef struct VkAttachmentDescription { VkAttachmentDescriptionFlags flags; VkFormat format; @@ -2870,6 +2927,18 @@ typedef struct VkAttachmentReference { VkImageLayout layout; } VkAttachmentReference; +typedef struct VkFramebufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkFramebufferCreateFlags flags; + VkRenderPass renderPass; + uint32_t attachmentCount; + const VkImageView* pAttachments; + uint32_t width; + uint32_t height; + uint32_t layers; +} VkFramebufferCreateInfo; + typedef struct VkSubpassDescription { VkSubpassDescriptionFlags flags; VkPipelineBindPoint pipelineBindPoint; @@ -2951,21 +3020,6 @@ typedef struct VkImageSubresourceLayers { uint32_t layerCount; } VkImageSubresourceLayers; -typedef struct VkImageCopy { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffset; - VkImageSubresourceLayers dstSubresource; - VkOffset3D dstOffset; - VkExtent3D extent; -} VkImageCopy; - -typedef struct VkImageBlit { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffsets[2]; - VkImageSubresourceLayers dstSubresource; - VkOffset3D 
dstOffsets[2]; -} VkImageBlit; - typedef struct VkBufferImageCopy { VkDeviceSize bufferOffset; uint32_t bufferRowLength; @@ -3003,6 +3057,21 @@ typedef struct VkClearRect { uint32_t layerCount; } VkClearRect; +typedef struct VkImageBlit { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit; + +typedef struct VkImageCopy { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy; + typedef struct VkImageResolve { VkImageSubresourceLayers srcSubresource; VkOffset3D srcOffset; @@ -3011,38 +3080,6 @@ typedef struct VkImageResolve { VkExtent3D extent; } VkImageResolve; -typedef struct VkMemoryBarrier { - VkStructureType sType; - const void* pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; -} VkMemoryBarrier; - -typedef struct VkBufferMemoryBarrier { - VkStructureType sType; - const void* pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - uint32_t srcQueueFamilyIndex; - uint32_t dstQueueFamilyIndex; - VkBuffer buffer; - VkDeviceSize offset; - VkDeviceSize size; -} VkBufferMemoryBarrier; - -typedef struct VkImageMemoryBarrier { - VkStructureType sType; - const void* pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - VkImageLayout oldLayout; - VkImageLayout newLayout; - uint32_t srcQueueFamilyIndex; - uint32_t dstQueueFamilyIndex; - VkImage image; - VkImageSubresourceRange subresourceRange; -} VkImageMemoryBarrier; - typedef struct VkRenderPassBeginInfo { VkStructureType sType; const void* pNext; @@ -3053,37 +3090,6 @@ typedef struct VkRenderPassBeginInfo { const VkClearValue* pClearValues; } VkRenderPassBeginInfo; -typedef struct VkDispatchIndirectCommand { - uint32_t x; - uint32_t y; - uint32_t z; -} VkDispatchIndirectCommand; - -typedef struct VkDrawIndexedIndirectCommand { - uint32_t indexCount; - uint32_t instanceCount; - uint32_t firstIndex; - int32_t vertexOffset; - uint32_t firstInstance; -} VkDrawIndexedIndirectCommand; - -typedef struct VkDrawIndirectCommand { - uint32_t vertexCount; - uint32_t instanceCount; - uint32_t firstVertex; - uint32_t firstInstance; -} VkDrawIndirectCommand; - -typedef struct VkBaseOutStructure { - VkStructureType sType; - struct VkBaseOutStructure* pNext; -} VkBaseOutStructure; - -typedef struct VkBaseInStructure { - VkStructureType sType; - const struct VkBaseInStructure* pNext; -} VkBaseInStructure; - typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance); typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator); typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); @@ -4037,9 +4043,6 @@ typedef enum VkPointClippingBehavior { VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1, VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, - VK_POINT_CLIPPING_BEHAVIOR_BEGIN_RANGE = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, - VK_POINT_CLIPPING_BEHAVIOR_END_RANGE = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, - VK_POINT_CLIPPING_BEHAVIOR_RANGE_SIZE = (VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY - 
VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES + 1), VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF } VkPointClippingBehavior; @@ -4048,9 +4051,6 @@ typedef enum VkTessellationDomainOrigin { VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1, VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, - VK_TESSELLATION_DOMAIN_ORIGIN_BEGIN_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, - VK_TESSELLATION_DOMAIN_ORIGIN_END_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, - VK_TESSELLATION_DOMAIN_ORIGIN_RANGE_SIZE = (VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT - VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT + 1), VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF } VkTessellationDomainOrigin; @@ -4065,9 +4065,6 @@ typedef enum VkSamplerYcbcrModelConversion { VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_BEGIN_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_END_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_RANGE_SIZE = (VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 - VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY + 1), VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF } VkSamplerYcbcrModelConversion; @@ -4076,9 +4073,6 @@ typedef enum VkSamplerYcbcrRange { VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1, VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, - VK_SAMPLER_YCBCR_RANGE_BEGIN_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, - VK_SAMPLER_YCBCR_RANGE_END_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, - VK_SAMPLER_YCBCR_RANGE_RANGE_SIZE = (VK_SAMPLER_YCBCR_RANGE_ITU_NARROW - VK_SAMPLER_YCBCR_RANGE_ITU_FULL + 1), VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF } VkSamplerYcbcrRange; @@ -4087,9 +4081,6 @@ typedef enum VkChromaLocation { VK_CHROMA_LOCATION_MIDPOINT = 1, VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN, VK_CHROMA_LOCATION_MIDPOINT_KHR = VK_CHROMA_LOCATION_MIDPOINT, - VK_CHROMA_LOCATION_BEGIN_RANGE = VK_CHROMA_LOCATION_COSITED_EVEN, - VK_CHROMA_LOCATION_END_RANGE = VK_CHROMA_LOCATION_MIDPOINT, - VK_CHROMA_LOCATION_RANGE_SIZE = (VK_CHROMA_LOCATION_MIDPOINT - VK_CHROMA_LOCATION_COSITED_EVEN + 1), VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF } VkChromaLocation; @@ -4097,9 +4088,6 @@ typedef enum VkDescriptorUpdateTemplateType { VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_END_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET + 1), VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF } VkDescriptorUpdateTemplateType; @@ -4219,6 +4207,7 @@ typedef enum VkExternalSemaphoreHandleTypeFlagBits { VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 
0x00000004, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, @@ -4378,8 +4367,6 @@ typedef struct VkMemoryRequirements2 { VkMemoryRequirements memoryRequirements; } VkMemoryRequirements2; -typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; - typedef struct VkSparseImageMemoryRequirements2 { VkStructureType sType; void* pNext; @@ -4907,7 +4894,6 @@ VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport( // Vulkan 1.2 version number #define VK_API_VERSION_1_2 VK_MAKE_VERSION(1, 2, 0)// Patch version should always be set to 0 -typedef uint64_t VkDeviceAddress; #define VK_MAX_DRIVER_NAME_SIZE 256 #define VK_MAX_DRIVER_INFO_SIZE 256 @@ -4924,6 +4910,8 @@ typedef enum VkDriverId { VK_DRIVER_ID_GOOGLE_SWIFTSHADER = 10, VK_DRIVER_ID_GGP_PROPRIETARY = 11, VK_DRIVER_ID_BROADCOM_PROPRIETARY = 12, + VK_DRIVER_ID_MESA_LLVMPIPE = 13, + VK_DRIVER_ID_MOLTENVK = 14, VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY, VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE, VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV, @@ -4936,9 +4924,6 @@ typedef enum VkDriverId { VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, VK_DRIVER_ID_GGP_PROPRIETARY_KHR = VK_DRIVER_ID_GGP_PROPRIETARY, VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY, - VK_DRIVER_ID_BEGIN_RANGE = VK_DRIVER_ID_AMD_PROPRIETARY, - VK_DRIVER_ID_END_RANGE = VK_DRIVER_ID_BROADCOM_PROPRIETARY, - VK_DRIVER_ID_RANGE_SIZE = (VK_DRIVER_ID_BROADCOM_PROPRIETARY - VK_DRIVER_ID_AMD_PROPRIETARY + 1), VK_DRIVER_ID_MAX_ENUM = 0x7FFFFFFF } VkDriverId; @@ -4949,9 +4934,6 @@ typedef enum VkShaderFloatControlsIndependence { VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE, - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_BEGIN_RANGE = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_END_RANGE = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE, - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_RANGE_SIZE = (VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY + 1), VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM = 0x7FFFFFFF } VkShaderFloatControlsIndependence; @@ -4962,9 +4944,6 @@ typedef enum VkSamplerReductionMode { VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, VK_SAMPLER_REDUCTION_MODE_MIN_EXT = VK_SAMPLER_REDUCTION_MODE_MIN, VK_SAMPLER_REDUCTION_MODE_MAX_EXT = VK_SAMPLER_REDUCTION_MODE_MAX, - VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, - VK_SAMPLER_REDUCTION_MODE_END_RANGE = VK_SAMPLER_REDUCTION_MODE_MAX, - VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE = (VK_SAMPLER_REDUCTION_MODE_MAX - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE + 1), VK_SAMPLER_REDUCTION_MODE_MAX_ENUM = 0x7FFFFFFF } VkSamplerReductionMode; @@ 
-4973,9 +4952,6 @@ typedef enum VkSemaphoreType { VK_SEMAPHORE_TYPE_TIMELINE = 1, VK_SEMAPHORE_TYPE_BINARY_KHR = VK_SEMAPHORE_TYPE_BINARY, VK_SEMAPHORE_TYPE_TIMELINE_KHR = VK_SEMAPHORE_TYPE_TIMELINE, - VK_SEMAPHORE_TYPE_BEGIN_RANGE = VK_SEMAPHORE_TYPE_BINARY, - VK_SEMAPHORE_TYPE_END_RANGE = VK_SEMAPHORE_TYPE_TIMELINE, - VK_SEMAPHORE_TYPE_RANGE_SIZE = (VK_SEMAPHORE_TYPE_TIMELINE - VK_SEMAPHORE_TYPE_BINARY + 1), VK_SEMAPHORE_TYPE_MAX_ENUM = 0x7FFFFFFF } VkSemaphoreType; @@ -5662,6 +5638,16 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) #define VK_KHR_SURFACE_SPEC_VERSION 25 #define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" +typedef enum VkPresentModeKHR { + VK_PRESENT_MODE_IMMEDIATE_KHR = 0, + VK_PRESENT_MODE_MAILBOX_KHR = 1, + VK_PRESENT_MODE_FIFO_KHR = 2, + VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, + VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, + VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, + VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPresentModeKHR; + typedef enum VkColorSpaceKHR { VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, @@ -5681,25 +5667,9 @@ typedef enum VkColorSpaceKHR { VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000, VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, - VK_COLOR_SPACE_BEGIN_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - VK_COLOR_SPACE_END_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - VK_COLOR_SPACE_RANGE_SIZE_KHR = (VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + 1), VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF } VkColorSpaceKHR; -typedef enum VkPresentModeKHR { - VK_PRESENT_MODE_IMMEDIATE_KHR = 0, - VK_PRESENT_MODE_MAILBOX_KHR = 1, - VK_PRESENT_MODE_FIFO_KHR = 2, - VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, - VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, - VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, - VK_PRESENT_MODE_BEGIN_RANGE_KHR = VK_PRESENT_MODE_IMMEDIATE_KHR, - VK_PRESENT_MODE_END_RANGE_KHR = VK_PRESENT_MODE_FIFO_RELAXED_KHR, - VK_PRESENT_MODE_RANGE_SIZE_KHR = (VK_PRESENT_MODE_FIFO_RELAXED_KHR - VK_PRESENT_MODE_IMMEDIATE_KHR + 1), - VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPresentModeKHR; - typedef enum VkSurfaceTransformFlagBitsKHR { VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, @@ -5712,7 +5682,6 @@ typedef enum VkSurfaceTransformFlagBitsKHR { VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF } VkSurfaceTransformFlagBitsKHR; -typedef VkFlags VkSurfaceTransformFlagsKHR; typedef enum VkCompositeAlphaFlagBitsKHR { VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, @@ -5722,6 +5691,7 @@ typedef enum VkCompositeAlphaFlagBitsKHR { VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF } VkCompositeAlphaFlagBitsKHR; typedef VkFlags VkCompositeAlphaFlagsKHR; +typedef VkFlags VkSurfaceTransformFlagsKHR; typedef struct VkSurfaceCapabilitiesKHR { uint32_t minImageCount; uint32_t maxImageCount; @@ -5941,6 +5911,7 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) #define VK_KHR_DISPLAY_SPEC_VERSION 23 #define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" +typedef VkFlags VkDisplayModeCreateFlagsKHR; typedef enum VkDisplayPlaneAlphaFlagBitsKHR { VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, @@ -5950,28 +5921,12 @@ typedef enum VkDisplayPlaneAlphaFlagBitsKHR { 
VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF } VkDisplayPlaneAlphaFlagBitsKHR; typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; -typedef VkFlags VkDisplayModeCreateFlagsKHR; typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; -typedef struct VkDisplayPropertiesKHR { - VkDisplayKHR display; - const char* displayName; - VkExtent2D physicalDimensions; - VkExtent2D physicalResolution; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkBool32 planeReorderPossible; - VkBool32 persistentContent; -} VkDisplayPropertiesKHR; - typedef struct VkDisplayModeParametersKHR { VkExtent2D visibleRegion; uint32_t refreshRate; } VkDisplayModeParametersKHR; -typedef struct VkDisplayModePropertiesKHR { - VkDisplayModeKHR displayMode; - VkDisplayModeParametersKHR parameters; -} VkDisplayModePropertiesKHR; - typedef struct VkDisplayModeCreateInfoKHR { VkStructureType sType; const void* pNext; @@ -5979,6 +5934,11 @@ typedef struct VkDisplayModeCreateInfoKHR { VkDisplayModeParametersKHR parameters; } VkDisplayModeCreateInfoKHR; +typedef struct VkDisplayModePropertiesKHR { + VkDisplayModeKHR displayMode; + VkDisplayModeParametersKHR parameters; +} VkDisplayModePropertiesKHR; + typedef struct VkDisplayPlaneCapabilitiesKHR { VkDisplayPlaneAlphaFlagsKHR supportedAlpha; VkOffset2D minSrcPosition; @@ -5996,6 +5956,16 @@ typedef struct VkDisplayPlanePropertiesKHR { uint32_t currentStackIndex; } VkDisplayPlanePropertiesKHR; +typedef struct VkDisplayPropertiesKHR { + VkDisplayKHR display; + const char* displayName; + VkExtent2D physicalDimensions; + VkExtent2D physicalResolution; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkBool32 planeReorderPossible; + VkBool32 persistentContent; +} VkDisplayPropertiesKHR; + typedef struct VkDisplaySurfaceCreateInfoKHR { VkStructureType sType; const void* pNext; @@ -6679,9 +6649,6 @@ typedef enum VkPerformanceCounterUnitKHR { VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8, VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9, VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10, - VK_PERFORMANCE_COUNTER_UNIT_BEGIN_RANGE_KHR = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR, - VK_PERFORMANCE_COUNTER_UNIT_END_RANGE_KHR = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR, - VK_PERFORMANCE_COUNTER_UNIT_RANGE_SIZE_KHR = (VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR - VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR + 1), VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF } VkPerformanceCounterUnitKHR; @@ -6692,9 +6659,6 @@ typedef enum VkPerformanceCounterScopeKHR { VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, - VK_PERFORMANCE_COUNTER_SCOPE_BEGIN_RANGE_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, - VK_PERFORMANCE_COUNTER_SCOPE_END_RANGE_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, - VK_PERFORMANCE_COUNTER_SCOPE_RANGE_SIZE_KHR = (VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR - VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR + 1), VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF } VkPerformanceCounterScopeKHR; @@ -6705,9 +6669,6 @@ typedef enum VkPerformanceCounterStorageKHR { VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3, VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4, VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5, - VK_PERFORMANCE_COUNTER_STORAGE_BEGIN_RANGE_KHR = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR, - VK_PERFORMANCE_COUNTER_STORAGE_END_RANGE_KHR = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR, - 
VK_PERFORMANCE_COUNTER_STORAGE_RANGE_SIZE_KHR = (VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR - VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR + 1), VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF } VkPerformanceCounterStorageKHR; @@ -6967,6 +6928,8 @@ typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR; typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR; +typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; + typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR; typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); @@ -7303,9 +7266,6 @@ typedef enum VkPipelineExecutableStatisticFormatKHR { VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1, VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2, VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3, - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BEGIN_RANGE_KHR = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_END_RANGE_KHR = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR, - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_RANGE_SIZE_KHR = (VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR + 1), VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF } VkPipelineExecutableStatisticFormatKHR; typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR { @@ -7387,6 +7347,159 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR #endif +#define VK_KHR_shader_non_semantic_info 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info" + + +#define VK_KHR_copy_commands2 1 +#define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1 +#define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2" +typedef struct VkBufferCopy2KHR { + VkStructureType sType; + const void* pNext; + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy2KHR; + +typedef struct VkCopyBufferInfo2KHR { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferCopy2KHR* pRegions; +} VkCopyBufferInfo2KHR; + +typedef struct VkImageCopy2KHR { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy2KHR; + +typedef struct VkCopyImageInfo2KHR { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageCopy2KHR* pRegions; +} VkCopyImageInfo2KHR; + +typedef struct VkBufferImageCopy2KHR { + VkStructureType sType; + const void* pNext; + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy2KHR; + +typedef struct VkCopyBufferToImageInfo2KHR { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkBufferImageCopy2KHR* pRegions; +} VkCopyBufferToImageInfo2KHR; + +typedef struct VkCopyImageToBufferInfo2KHR { + VkStructureType sType; + 
const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferImageCopy2KHR* pRegions; +} VkCopyImageToBufferInfo2KHR; + +typedef struct VkImageBlit2KHR { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit2KHR; + +typedef struct VkBlitImageInfo2KHR { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageBlit2KHR* pRegions; + VkFilter filter; +} VkBlitImageInfo2KHR; + +typedef struct VkImageResolve2KHR { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve2KHR; + +typedef struct VkResolveImageInfo2KHR { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageResolve2KHR* pRegions; +} VkResolveImageInfo2KHR; + +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2KHR)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2KHR)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2KHR* pCopyBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2KHR* pCopyImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2KHR* pBlitImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2KHR* pResolveImageInfo); +#endif + + #define VK_EXT_debug_report 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) #define VK_EXT_DEBUG_REPORT_SPEC_VERSION 9 @@ -7424,19 +7537,15 @@ typedef enum VkDebugReportObjectTypeEXT { VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, - VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31, - VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, 
VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, - VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000165000, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1), + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF } VkDebugReportObjectTypeEXT; @@ -7517,9 +7626,6 @@ VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( typedef enum VkRasterizationOrderAMD { VK_RASTERIZATION_ORDER_STRICT_AMD = 0, VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, - VK_RASTERIZATION_ORDER_BEGIN_RANGE_AMD = VK_RASTERIZATION_ORDER_STRICT_AMD, - VK_RASTERIZATION_ORDER_END_RANGE_AMD = VK_RASTERIZATION_ORDER_RELAXED_AMD, - VK_RASTERIZATION_ORDER_RANGE_SIZE_AMD = (VK_RASTERIZATION_ORDER_RELAXED_AMD - VK_RASTERIZATION_ORDER_STRICT_AMD + 1), VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF } VkRasterizationOrderAMD; typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { @@ -7713,7 +7819,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( #define VK_NVX_image_view_handle 1 -#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 1 +#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 2 #define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" typedef struct VkImageViewHandleInfoNVX { VkStructureType sType; @@ -7723,12 +7829,25 @@ typedef struct VkImageViewHandleInfoNVX { VkSampler sampler; } VkImageViewHandleInfoNVX; +typedef struct VkImageViewAddressPropertiesNVX { + VkStructureType sType; + void* pNext; + VkDeviceAddress deviceAddress; + VkDeviceSize size; +} VkImageViewAddressPropertiesNVX; + typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX( + VkDevice device, + VkImageView imageView, + VkImageViewAddressPropertiesNVX* pProperties); #endif @@ -7793,9 +7912,6 @@ typedef enum VkShaderInfoTypeAMD { VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0, VK_SHADER_INFO_TYPE_BINARY_AMD = 1, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2, - VK_SHADER_INFO_TYPE_BEGIN_RANGE_AMD = VK_SHADER_INFO_TYPE_STATISTICS_AMD, - VK_SHADER_INFO_TYPE_END_RANGE_AMD = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, - VK_SHADER_INFO_TYPE_RANGE_SIZE_AMD = (VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD - VK_SHADER_INFO_TYPE_STATISTICS_AMD + 
1), VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF } VkShaderInfoTypeAMD; typedef struct VkShaderResourceUsageAMD { @@ -7916,9 +8032,6 @@ typedef struct VkExportMemoryAllocateInfoNV { typedef enum VkValidationCheckEXT { VK_VALIDATION_CHECK_ALL_EXT = 0, VK_VALIDATION_CHECK_SHADERS_EXT = 1, - VK_VALIDATION_CHECK_BEGIN_RANGE_EXT = VK_VALIDATION_CHECK_ALL_EXT, - VK_VALIDATION_CHECK_END_RANGE_EXT = VK_VALIDATION_CHECK_SHADERS_EXT, - VK_VALIDATION_CHECK_RANGE_SIZE_EXT = (VK_VALIDATION_CHECK_SHADERS_EXT - VK_VALIDATION_CHECK_ALL_EXT + 1), VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF } VkValidationCheckEXT; typedef struct VkValidationFlagsEXT { @@ -8011,229 +8124,6 @@ VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT( #endif -#define VK_NVX_device_generated_commands 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX) -#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 -#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands" - -typedef enum VkIndirectCommandsTokenTypeNVX { - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX = 0, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX = 1, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX = 2, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX = 3, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX = 4, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX = 5, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX = 6, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX = 7, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX + 1), - VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF -} VkIndirectCommandsTokenTypeNVX; - -typedef enum VkObjectEntryTypeNVX { - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX = 0, - VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX = 1, - VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX = 2, - VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX = 3, - VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX = 4, - VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, - VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX, - VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX + 1), - VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF -} VkObjectEntryTypeNVX; - -typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX { - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF -} VkIndirectCommandsLayoutUsageFlagBitsNVX; -typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX; - -typedef enum VkObjectEntryUsageFlagBitsNVX { - VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001, - VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002, - VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF -} VkObjectEntryUsageFlagBitsNVX; -typedef VkFlags VkObjectEntryUsageFlagsNVX; -typedef struct VkDeviceGeneratedCommandsFeaturesNVX { - VkStructureType sType; - const void* pNext; - VkBool32 
computeBindingPointSupport; -} VkDeviceGeneratedCommandsFeaturesNVX; - -typedef struct VkDeviceGeneratedCommandsLimitsNVX { - VkStructureType sType; - const void* pNext; - uint32_t maxIndirectCommandsLayoutTokenCount; - uint32_t maxObjectEntryCounts; - uint32_t minSequenceCountBufferOffsetAlignment; - uint32_t minSequenceIndexBufferOffsetAlignment; - uint32_t minCommandsTokenBufferOffsetAlignment; -} VkDeviceGeneratedCommandsLimitsNVX; - -typedef struct VkIndirectCommandsTokenNVX { - VkIndirectCommandsTokenTypeNVX tokenType; - VkBuffer buffer; - VkDeviceSize offset; -} VkIndirectCommandsTokenNVX; - -typedef struct VkIndirectCommandsLayoutTokenNVX { - VkIndirectCommandsTokenTypeNVX tokenType; - uint32_t bindingUnit; - uint32_t dynamicCount; - uint32_t divisor; -} VkIndirectCommandsLayoutTokenNVX; - -typedef struct VkIndirectCommandsLayoutCreateInfoNVX { - VkStructureType sType; - const void* pNext; - VkPipelineBindPoint pipelineBindPoint; - VkIndirectCommandsLayoutUsageFlagsNVX flags; - uint32_t tokenCount; - const VkIndirectCommandsLayoutTokenNVX* pTokens; -} VkIndirectCommandsLayoutCreateInfoNVX; - -typedef struct VkCmdProcessCommandsInfoNVX { - VkStructureType sType; - const void* pNext; - VkObjectTableNVX objectTable; - VkIndirectCommandsLayoutNVX indirectCommandsLayout; - uint32_t indirectCommandsTokenCount; - const VkIndirectCommandsTokenNVX* pIndirectCommandsTokens; - uint32_t maxSequencesCount; - VkCommandBuffer targetCommandBuffer; - VkBuffer sequencesCountBuffer; - VkDeviceSize sequencesCountOffset; - VkBuffer sequencesIndexBuffer; - VkDeviceSize sequencesIndexOffset; -} VkCmdProcessCommandsInfoNVX; - -typedef struct VkCmdReserveSpaceForCommandsInfoNVX { - VkStructureType sType; - const void* pNext; - VkObjectTableNVX objectTable; - VkIndirectCommandsLayoutNVX indirectCommandsLayout; - uint32_t maxSequencesCount; -} VkCmdReserveSpaceForCommandsInfoNVX; - -typedef struct VkObjectTableCreateInfoNVX { - VkStructureType sType; - const void* pNext; - uint32_t objectCount; - const VkObjectEntryTypeNVX* pObjectEntryTypes; - const uint32_t* pObjectEntryCounts; - const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags; - uint32_t maxUniformBuffersPerDescriptor; - uint32_t maxStorageBuffersPerDescriptor; - uint32_t maxStorageImagesPerDescriptor; - uint32_t maxSampledImagesPerDescriptor; - uint32_t maxPipelineLayouts; -} VkObjectTableCreateInfoNVX; - -typedef struct VkObjectTableEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; -} VkObjectTableEntryNVX; - -typedef struct VkObjectTablePipelineEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipeline pipeline; -} VkObjectTablePipelineEntryNVX; - -typedef struct VkObjectTableDescriptorSetEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipelineLayout pipelineLayout; - VkDescriptorSet descriptorSet; -} VkObjectTableDescriptorSetEntryNVX; - -typedef struct VkObjectTableVertexBufferEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkBuffer buffer; -} VkObjectTableVertexBufferEntryNVX; - -typedef struct VkObjectTableIndexBufferEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkBuffer buffer; - VkIndexType indexType; -} VkObjectTableIndexBufferEntryNVX; - -typedef struct VkObjectTablePushConstantEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipelineLayout pipelineLayout; - VkShaderStageFlags stageFlags; -} VkObjectTablePushConstantEntryNVX; - -typedef void (VKAPI_PTR 
*PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); -typedef void (VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); -typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable); -typedef void (VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices); -typedef VkResult (VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX( - VkDevice device, - const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); - -VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX( - VkDevice device, - VkIndirectCommandsLayoutNVX indirectCommandsLayout, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX( - VkDevice device, - const VkObjectTableCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkObjectTableNVX* pObjectTable); - -VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX( - VkDevice device, - VkObjectTableNVX objectTable, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX( - VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectTableEntryNVX* const* ppObjectTableEntries, - const uint32_t* pObjectIndices); - -VKAPI_ATTR VkResult VKAPI_CALL vkUnregisterObjectsNVX( - VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectEntryTypeNVX* pObjectEntryTypes, - const uint32_t* pObjectIndices); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( - VkPhysicalDevice physicalDevice, - VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, - VkDeviceGeneratedCommandsLimitsNVX* pLimits); -#endif - - #define VK_NV_clip_space_w_scaling 1 #define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 #define 
VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" @@ -8316,25 +8206,16 @@ typedef enum VkDisplayPowerStateEXT { VK_DISPLAY_POWER_STATE_OFF_EXT = 0, VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, VK_DISPLAY_POWER_STATE_ON_EXT = 2, - VK_DISPLAY_POWER_STATE_BEGIN_RANGE_EXT = VK_DISPLAY_POWER_STATE_OFF_EXT, - VK_DISPLAY_POWER_STATE_END_RANGE_EXT = VK_DISPLAY_POWER_STATE_ON_EXT, - VK_DISPLAY_POWER_STATE_RANGE_SIZE_EXT = (VK_DISPLAY_POWER_STATE_ON_EXT - VK_DISPLAY_POWER_STATE_OFF_EXT + 1), VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF } VkDisplayPowerStateEXT; typedef enum VkDeviceEventTypeEXT { VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, - VK_DEVICE_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, - VK_DEVICE_EVENT_TYPE_END_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, - VK_DEVICE_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + 1), VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF } VkDeviceEventTypeEXT; typedef enum VkDisplayEventTypeEXT { VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, - VK_DISPLAY_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, - VK_DISPLAY_EVENT_TYPE_END_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, - VK_DISPLAY_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + 1), VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF } VkDisplayEventTypeEXT; typedef struct VkDisplayPowerInfoEXT { @@ -8476,9 +8357,6 @@ typedef enum VkViewportCoordinateSwizzleNV { VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, - VK_VIEWPORT_COORDINATE_SWIZZLE_BEGIN_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, - VK_VIEWPORT_COORDINATE_SWIZZLE_END_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV, - VK_VIEWPORT_COORDINATE_SWIZZLE_RANGE_SIZE_NV = (VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV + 1), VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF } VkViewportCoordinateSwizzleNV; typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; @@ -8506,9 +8384,6 @@ typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { typedef enum VkDiscardRectangleModeEXT { VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, - VK_DISCARD_RECTANGLE_MODE_BEGIN_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, - VK_DISCARD_RECTANGLE_MODE_END_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT, - VK_DISCARD_RECTANGLE_MODE_RANGE_SIZE_EXT = (VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT + 1), VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF } VkDiscardRectangleModeEXT; typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; @@ -8546,9 +8421,6 @@ typedef enum VkConservativeRasterizationModeEXT { VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0, VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1, VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2, - VK_CONSERVATIVE_RASTERIZATION_MODE_BEGIN_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, - VK_CONSERVATIVE_RASTERIZATION_MODE_END_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT, - VK_CONSERVATIVE_RASTERIZATION_MODE_RANGE_SIZE_EXT = (VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT - VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT + 1), VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT 
= 0x7FFFFFFF } VkConservativeRasterizationModeEXT; typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; @@ -8645,10 +8517,9 @@ VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( #define VK_EXT_debug_utils 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) -#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 1 +#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 2 #define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils" typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; -typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT { VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001, @@ -8657,7 +8528,6 @@ typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT { VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000, VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF } VkDebugUtilsMessageSeverityFlagBitsEXT; -typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001, @@ -8666,6 +8536,15 @@ typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF } VkDebugUtilsMessageTypeFlagBitsEXT; typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; +typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; +typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; +typedef struct VkDebugUtilsLabelEXT { + VkStructureType sType; + const void* pNext; + const char* pLabelName; + float color[4]; +} VkDebugUtilsLabelEXT; + typedef struct VkDebugUtilsObjectNameInfoEXT { VkStructureType sType; const void* pNext; @@ -8674,23 +8553,6 @@ typedef struct VkDebugUtilsObjectNameInfoEXT { const char* pObjectName; } VkDebugUtilsObjectNameInfoEXT; -typedef struct VkDebugUtilsObjectTagInfoEXT { - VkStructureType sType; - const void* pNext; - VkObjectType objectType; - uint64_t objectHandle; - uint64_t tagName; - size_t tagSize; - const void* pTag; -} VkDebugUtilsObjectTagInfoEXT; - -typedef struct VkDebugUtilsLabelEXT { - VkStructureType sType; - const void* pNext; - const char* pLabelName; - float color[4]; -} VkDebugUtilsLabelEXT; - typedef struct VkDebugUtilsMessengerCallbackDataEXT { VkStructureType sType; const void* pNext; @@ -8722,6 +8584,16 @@ typedef struct VkDebugUtilsMessengerCreateInfoEXT { void* pUserData; } VkDebugUtilsMessengerCreateInfoEXT; +typedef struct VkDebugUtilsObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugUtilsObjectTagInfoEXT; + typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo); typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo); typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); @@ -8932,9 +8804,6 @@ typedef enum VkBlendOverlapEXT { VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, VK_BLEND_OVERLAP_DISJOINT_EXT = 1, VK_BLEND_OVERLAP_CONJOINT_EXT = 2, - VK_BLEND_OVERLAP_BEGIN_RANGE_EXT = VK_BLEND_OVERLAP_UNCORRELATED_EXT, - VK_BLEND_OVERLAP_END_RANGE_EXT = VK_BLEND_OVERLAP_CONJOINT_EXT, - VK_BLEND_OVERLAP_RANGE_SIZE_EXT = (VK_BLEND_OVERLAP_CONJOINT_EXT - VK_BLEND_OVERLAP_UNCORRELATED_EXT + 1), VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF } VkBlendOverlapEXT; typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { @@ 
-8987,9 +8856,6 @@ typedef enum VkCoverageModulationModeNV { VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, - VK_COVERAGE_MODULATION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_MODULATION_MODE_NONE_NV, - VK_COVERAGE_MODULATION_MODE_END_RANGE_NV = VK_COVERAGE_MODULATION_MODE_RGBA_NV, - VK_COVERAGE_MODULATION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_MODULATION_MODE_RGBA_NV - VK_COVERAGE_MODULATION_MODE_NONE_NV + 1), VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF } VkCoverageModulationModeNV; typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; @@ -9096,9 +8962,6 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) typedef enum VkValidationCacheHeaderVersionEXT { VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, - VK_VALIDATION_CACHE_HEADER_VERSION_BEGIN_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, - VK_VALIDATION_CACHE_HEADER_VERSION_END_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, - VK_VALIDATION_CACHE_HEADER_VERSION_RANGE_SIZE_EXT = (VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT - VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + 1), VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF } VkValidationCacheHeaderVersionEXT; typedef VkFlags VkValidationCacheCreateFlagsEXT; @@ -9188,9 +9051,6 @@ typedef enum VkShadingRatePaletteEntryNV { VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11, - VK_SHADING_RATE_PALETTE_ENTRY_BEGIN_RANGE_NV = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, - VK_SHADING_RATE_PALETTE_ENTRY_END_RANGE_NV = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV, - VK_SHADING_RATE_PALETTE_ENTRY_RANGE_SIZE_NV = (VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV - VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV + 1), VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF } VkShadingRatePaletteEntryNV; @@ -9199,9 +9059,6 @@ typedef enum VkCoarseSampleOrderTypeNV { VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1, VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2, VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3, - VK_COARSE_SAMPLE_ORDER_TYPE_BEGIN_RANGE_NV = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, - VK_COARSE_SAMPLE_ORDER_TYPE_END_RANGE_NV = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV, - VK_COARSE_SAMPLE_ORDER_TYPE_RANGE_SIZE_NV = (VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV - VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV + 1), VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF } VkCoarseSampleOrderTypeNV; typedef struct VkShadingRatePaletteNV { @@ -9278,91 +9135,127 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( #define VK_NV_ray_tracing 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) +typedef VkAccelerationStructureKHR VkAccelerationStructureNV; + #define VK_NV_RAY_TRACING_SPEC_VERSION 3 #define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing" -#define VK_SHADER_UNUSED_NV (~0U) +#define VK_SHADER_UNUSED_KHR (~0U) +#define VK_SHADER_UNUSED_NV VK_SHADER_UNUSED_KHR -typedef enum VkAccelerationStructureTypeNV { - VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = 0, - VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = 1, - VK_ACCELERATION_STRUCTURE_TYPE_BEGIN_RANGE_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV, - VK_ACCELERATION_STRUCTURE_TYPE_END_RANGE_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV, - 
VK_ACCELERATION_STRUCTURE_TYPE_RANGE_SIZE_NV = (VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV - VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV + 1), - VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkAccelerationStructureTypeNV; +typedef enum VkRayTracingShaderGroupTypeKHR { + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = 0, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = 1, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = 2, + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkRayTracingShaderGroupTypeKHR; +typedef VkRayTracingShaderGroupTypeKHR VkRayTracingShaderGroupTypeNV; -typedef enum VkRayTracingShaderGroupTypeNV { - VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = 0, - VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = 1, - VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = 2, - VK_RAY_TRACING_SHADER_GROUP_TYPE_BEGIN_RANGE_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV, - VK_RAY_TRACING_SHADER_GROUP_TYPE_END_RANGE_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV, - VK_RAY_TRACING_SHADER_GROUP_TYPE_RANGE_SIZE_NV = (VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV - VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV + 1), - VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkRayTracingShaderGroupTypeNV; -typedef enum VkGeometryTypeNV { - VK_GEOMETRY_TYPE_TRIANGLES_NV = 0, - VK_GEOMETRY_TYPE_AABBS_NV = 1, - VK_GEOMETRY_TYPE_BEGIN_RANGE_NV = VK_GEOMETRY_TYPE_TRIANGLES_NV, - VK_GEOMETRY_TYPE_END_RANGE_NV = VK_GEOMETRY_TYPE_AABBS_NV, - VK_GEOMETRY_TYPE_RANGE_SIZE_NV = (VK_GEOMETRY_TYPE_AABBS_NV - VK_GEOMETRY_TYPE_TRIANGLES_NV + 1), - VK_GEOMETRY_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkGeometryTypeNV; +typedef enum VkGeometryTypeKHR { + VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0, + VK_GEOMETRY_TYPE_AABBS_KHR = 1, + VK_GEOMETRY_TYPE_INSTANCES_KHR = 1000150000, + VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR, + VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryTypeKHR; +typedef VkGeometryTypeKHR VkGeometryTypeNV; -typedef enum VkCopyAccelerationStructureModeNV { - VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = 0, - VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = 1, - VK_COPY_ACCELERATION_STRUCTURE_MODE_BEGIN_RANGE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV, - VK_COPY_ACCELERATION_STRUCTURE_MODE_END_RANGE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV, - VK_COPY_ACCELERATION_STRUCTURE_MODE_RANGE_SIZE_NV = (VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV - VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV + 1), - VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_NV = 0x7FFFFFFF -} VkCopyAccelerationStructureModeNV; -typedef enum VkAccelerationStructureMemoryRequirementsTypeNV { - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BEGIN_RANGE_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_END_RANGE_NV = 
VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_RANGE_SIZE_NV = (VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV + 1), - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkAccelerationStructureMemoryRequirementsTypeNV; +typedef enum VkAccelerationStructureTypeKHR { + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1, + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureTypeKHR; +typedef VkAccelerationStructureTypeKHR VkAccelerationStructureTypeNV; -typedef enum VkGeometryFlagBitsNV { - VK_GEOMETRY_OPAQUE_BIT_NV = 0x00000001, - VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = 0x00000002, - VK_GEOMETRY_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkGeometryFlagBitsNV; -typedef VkFlags VkGeometryFlagsNV; -typedef enum VkGeometryInstanceFlagBitsNV { - VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = 0x00000001, - VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = 0x00000002, - VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = 0x00000004, - VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = 0x00000008, - VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkGeometryInstanceFlagBitsNV; -typedef VkFlags VkGeometryInstanceFlagsNV; +typedef enum VkCopyAccelerationStructureModeKHR { + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR = 0, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR = 1, + VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR = 2, + VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR = 3, + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCopyAccelerationStructureModeKHR; +typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV; + + +typedef enum VkAccelerationStructureMemoryRequirementsTypeKHR { + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR = 0, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR = 1, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR = 2, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureMemoryRequirementsTypeKHR; +typedef VkAccelerationStructureMemoryRequirementsTypeKHR VkAccelerationStructureMemoryRequirementsTypeNV; + + +typedef enum VkGeometryFlagBitsKHR { + VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR = 0x00000002, + VK_GEOMETRY_OPAQUE_BIT_NV = VK_GEOMETRY_OPAQUE_BIT_KHR, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR, + 
VK_GEOMETRY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryFlagBitsKHR; +typedef VkFlags VkGeometryFlagsKHR; +typedef VkGeometryFlagsKHR VkGeometryFlagsNV; + +typedef VkGeometryFlagBitsKHR VkGeometryFlagBitsNV; + + +typedef enum VkGeometryInstanceFlagBitsKHR { + VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR = 0x00000001, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = 0x00000002, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008, + VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryInstanceFlagBitsKHR; +typedef VkFlags VkGeometryInstanceFlagsKHR; +typedef VkGeometryInstanceFlagsKHR VkGeometryInstanceFlagsNV; + +typedef VkGeometryInstanceFlagBitsKHR VkGeometryInstanceFlagBitsNV; + + +typedef enum VkBuildAccelerationStructureFlagBitsKHR { + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR = 0x00000001, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR = 0x00000002, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR = 0x00000004, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureFlagBitsKHR; +typedef VkFlags VkBuildAccelerationStructureFlagsKHR; +typedef VkBuildAccelerationStructureFlagsKHR VkBuildAccelerationStructureFlagsNV; + +typedef VkBuildAccelerationStructureFlagBitsKHR VkBuildAccelerationStructureFlagBitsNV; -typedef enum VkBuildAccelerationStructureFlagBitsNV { - VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = 0x00000001, - VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = 0x00000002, - VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = 0x00000004, - VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = 0x00000008, - VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = 0x00000010, - VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkBuildAccelerationStructureFlagBitsNV; -typedef VkFlags VkBuildAccelerationStructureFlagsNV; typedef struct VkRayTracingShaderGroupCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkRayTracingShaderGroupTypeNV type; - uint32_t generalShader; - uint32_t closestHitShader; - uint32_t anyHitShader; - uint32_t intersectionShader; + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + 
uint32_t intersectionShader; } VkRayTracingShaderGroupCreateInfoNV; typedef struct VkRayTracingPipelineCreateInfoNV { @@ -9410,11 +9303,11 @@ typedef struct VkGeometryDataNV { } VkGeometryDataNV; typedef struct VkGeometryNV { - VkStructureType sType; - const void* pNext; - VkGeometryTypeNV geometryType; - VkGeometryDataNV geometry; - VkGeometryFlagsNV flags; + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkGeometryDataNV geometry; + VkGeometryFlagsKHR flags; } VkGeometryNV; typedef struct VkAccelerationStructureInfoNV { @@ -9434,22 +9327,26 @@ typedef struct VkAccelerationStructureCreateInfoNV { VkAccelerationStructureInfoNV info; } VkAccelerationStructureCreateInfoNV; -typedef struct VkBindAccelerationStructureMemoryInfoNV { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureNV accelerationStructure; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - uint32_t deviceIndexCount; - const uint32_t* pDeviceIndices; -} VkBindAccelerationStructureMemoryInfoNV; +typedef struct VkBindAccelerationStructureMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR accelerationStructure; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindAccelerationStructureMemoryInfoKHR; -typedef struct VkWriteDescriptorSetAccelerationStructureNV { - VkStructureType sType; - const void* pNext; - uint32_t accelerationStructureCount; - const VkAccelerationStructureNV* pAccelerationStructures; -} VkWriteDescriptorSetAccelerationStructureNV; +typedef VkBindAccelerationStructureMemoryInfoKHR VkBindAccelerationStructureMemoryInfoNV; + +typedef struct VkWriteDescriptorSetAccelerationStructureKHR { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureKHR* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureKHR; + +typedef VkWriteDescriptorSetAccelerationStructureKHR VkWriteDescriptorSetAccelerationStructureNV; typedef struct VkAccelerationStructureMemoryRequirementsInfoNV { VkStructureType sType; @@ -9471,17 +9368,49 @@ typedef struct VkPhysicalDeviceRayTracingPropertiesNV { uint32_t maxDescriptorSetAccelerationStructures; } VkPhysicalDeviceRayTracingPropertiesNV; +typedef struct VkTransformMatrixKHR { + float matrix[3][4]; +} VkTransformMatrixKHR; + +typedef VkTransformMatrixKHR VkTransformMatrixNV; + +typedef struct VkAabbPositionsKHR { + float minX; + float minY; + float minZ; + float maxX; + float maxY; + float maxZ; +} VkAabbPositionsKHR; + +typedef VkAabbPositionsKHR VkAabbPositionsNV; + +typedef struct VkAccelerationStructureInstanceKHR { + VkTransformMatrixKHR transform; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureInstanceKHR; + +typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV; + typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); -typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, 
VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); -typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); -typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); -typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode); +typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryKHR)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkCopyAccelerationStructureModeKHR mode); typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth); typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); -typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); -typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* 
pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader); #ifndef VK_NO_PROTOTYPES @@ -9491,9 +9420,14 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV( const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureKHR( + VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks* pAllocator); + VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, + VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( @@ -9501,10 +9435,15 @@ VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); +VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryKHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos); + VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, - const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); + const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos); VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, @@ -9512,16 +9451,16 @@ VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, + VkAccelerationStructureKHR dst, + VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset); VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkCopyAccelerationStructureModeNV mode); + VkAccelerationStructureKHR dst, + VkAccelerationStructureKHR src, + VkCopyAccelerationStructureModeKHR mode); VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, @@ -9548,6 +9487,14 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV( const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, @@ -9558,14 +9505,22 @@ VKAPI_ATTR VkResult VKAPI_CALL 
vkGetRayTracingShaderGroupHandlesNV( VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, + VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData); +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesKHR( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, - const VkAccelerationStructureNV* pAccelerationStructures, + const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); @@ -9612,6 +9567,11 @@ typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT { +#define VK_QCOM_render_pass_shader_resolve 1 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION 4 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve" + + #define VK_EXT_global_priority 1 #define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 #define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" @@ -9621,9 +9581,6 @@ typedef enum VkQueueGlobalPriorityEXT { VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = 256, VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = 512, VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = 1024, - VK_QUEUE_GLOBAL_PRIORITY_BEGIN_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, - VK_QUEUE_GLOBAL_PRIORITY_END_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT, - VK_QUEUE_GLOBAL_PRIORITY_RANGE_SIZE_EXT = (VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT - VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT + 1), VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_EXT = 0x7FFFFFFF } VkQueueGlobalPriorityEXT; typedef struct VkDeviceQueueGlobalPriorityCreateInfoEXT { @@ -9707,9 +9664,6 @@ typedef enum VkTimeDomainEXT { VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = 1, VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = 2, VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = 3, - VK_TIME_DOMAIN_BEGIN_RANGE_EXT = VK_TIME_DOMAIN_DEVICE_EXT, - VK_TIME_DOMAIN_END_RANGE_EXT = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT, - VK_TIME_DOMAIN_RANGE_SIZE_EXT = (VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT - VK_TIME_DOMAIN_DEVICE_EXT + 1), VK_TIME_DOMAIN_MAX_ENUM_EXT = 0x7FFFFFFF } VkTimeDomainEXT; typedef struct VkCalibratedTimestampInfoEXT { @@ -9768,9 +9722,6 @@ typedef enum VkMemoryOverallocationBehaviorAMD { VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0, VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1, VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2, - VK_MEMORY_OVERALLOCATION_BEHAVIOR_BEGIN_RANGE_AMD = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, - VK_MEMORY_OVERALLOCATION_BEHAVIOR_END_RANGE_AMD = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD, - VK_MEMORY_OVERALLOCATION_BEHAVIOR_RANGE_SIZE_AMD = (VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD - VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD + 1), VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF } VkMemoryOverallocationBehaviorAMD; typedef struct VkDeviceMemoryOverallocationCreateInfoAMD { @@ -10008,40 +9959,28 @@ typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { #define VK_INTEL_performance_query 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) -#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 1 +#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2 #define 
VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query" typedef enum VkPerformanceConfigurationTypeINTEL { VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0, - VK_PERFORMANCE_CONFIGURATION_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL, - VK_PERFORMANCE_CONFIGURATION_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL, - VK_PERFORMANCE_CONFIGURATION_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL - VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL + 1), VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF } VkPerformanceConfigurationTypeINTEL; typedef enum VkQueryPoolSamplingModeINTEL { VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0, - VK_QUERY_POOL_SAMPLING_MODE_BEGIN_RANGE_INTEL = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL, - VK_QUERY_POOL_SAMPLING_MODE_END_RANGE_INTEL = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL, - VK_QUERY_POOL_SAMPLING_MODE_RANGE_SIZE_INTEL = (VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL - VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL + 1), VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF } VkQueryPoolSamplingModeINTEL; typedef enum VkPerformanceOverrideTypeINTEL { VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0, VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1, - VK_PERFORMANCE_OVERRIDE_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, - VK_PERFORMANCE_OVERRIDE_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL, - VK_PERFORMANCE_OVERRIDE_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL - VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL + 1), VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF } VkPerformanceOverrideTypeINTEL; typedef enum VkPerformanceParameterTypeINTEL { VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0, VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1, - VK_PERFORMANCE_PARAMETER_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL, - VK_PERFORMANCE_PARAMETER_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL, - VK_PERFORMANCE_PARAMETER_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL - VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL + 1), VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF } VkPerformanceParameterTypeINTEL; @@ -10051,9 +9990,6 @@ typedef enum VkPerformanceValueTypeINTEL { VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2, VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3, VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4, - VK_PERFORMANCE_VALUE_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL, - VK_PERFORMANCE_VALUE_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL, - VK_PERFORMANCE_VALUE_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL - VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL + 1), VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF } VkPerformanceValueTypeINTEL; typedef union VkPerformanceValueDataINTEL { @@ -10075,11 +10011,13 @@ typedef struct VkInitializePerformanceApiInfoINTEL { void* pUserData; } VkInitializePerformanceApiInfoINTEL; -typedef struct VkQueryPoolCreateInfoINTEL { +typedef struct VkQueryPoolPerformanceQueryCreateInfoINTEL { VkStructureType sType; const void* pNext; VkQueryPoolSamplingModeINTEL 
performanceCountersSampling; -} VkQueryPoolCreateInfoINTEL; +} VkQueryPoolPerformanceQueryCreateInfoINTEL; + +typedef VkQueryPoolPerformanceQueryCreateInfoINTEL VkQueryPoolCreateInfoINTEL; typedef struct VkPerformanceMarkerInfoINTEL { VkStructureType sType; @@ -10408,16 +10346,15 @@ typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT; #define VK_EXT_validation_features 1 -#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 2 +#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 4 #define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features" typedef enum VkValidationFeatureEnableEXT { VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0, VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1, VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2, - VK_VALIDATION_FEATURE_ENABLE_BEGIN_RANGE_EXT = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, - VK_VALIDATION_FEATURE_ENABLE_END_RANGE_EXT = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, - VK_VALIDATION_FEATURE_ENABLE_RANGE_SIZE_EXT = (VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT - VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT + 1), + VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3, + VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT = 4, VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF } VkValidationFeatureEnableEXT; @@ -10429,9 +10366,6 @@ typedef enum VkValidationFeatureDisableEXT { VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4, VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5, VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6, - VK_VALIDATION_FEATURE_DISABLE_BEGIN_RANGE_EXT = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT, - VK_VALIDATION_FEATURE_DISABLE_END_RANGE_EXT = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT, - VK_VALIDATION_FEATURE_DISABLE_RANGE_SIZE_EXT = (VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT - VK_VALIDATION_FEATURE_DISABLE_ALL_EXT + 1), VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF } VkValidationFeatureDisableEXT; typedef struct VkValidationFeaturesEXT { @@ -10461,9 +10395,6 @@ typedef enum VkComponentTypeNV { VK_COMPONENT_TYPE_UINT16_NV = 8, VK_COMPONENT_TYPE_UINT32_NV = 9, VK_COMPONENT_TYPE_UINT64_NV = 10, - VK_COMPONENT_TYPE_BEGIN_RANGE_NV = VK_COMPONENT_TYPE_FLOAT16_NV, - VK_COMPONENT_TYPE_END_RANGE_NV = VK_COMPONENT_TYPE_UINT64_NV, - VK_COMPONENT_TYPE_RANGE_SIZE_NV = (VK_COMPONENT_TYPE_UINT64_NV - VK_COMPONENT_TYPE_FLOAT16_NV + 1), VK_COMPONENT_TYPE_MAX_ENUM_NV = 0x7FFFFFFF } VkComponentTypeNV; @@ -10472,9 +10403,6 @@ typedef enum VkScopeNV { VK_SCOPE_WORKGROUP_NV = 2, VK_SCOPE_SUBGROUP_NV = 3, VK_SCOPE_QUEUE_FAMILY_NV = 5, - VK_SCOPE_BEGIN_RANGE_NV = VK_SCOPE_DEVICE_NV, - VK_SCOPE_END_RANGE_NV = VK_SCOPE_QUEUE_FAMILY_NV, - VK_SCOPE_RANGE_SIZE_NV = (VK_SCOPE_QUEUE_FAMILY_NV - VK_SCOPE_DEVICE_NV + 1), VK_SCOPE_MAX_ENUM_NV = 0x7FFFFFFF } VkScopeNV; typedef struct VkCooperativeMatrixPropertiesNV { @@ -10520,9 +10448,6 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( typedef enum VkCoverageReductionModeNV { VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0, VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1, - VK_COVERAGE_REDUCTION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_REDUCTION_MODE_MERGE_NV, - VK_COVERAGE_REDUCTION_MODE_END_RANGE_NV = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV, - VK_COVERAGE_REDUCTION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV - VK_COVERAGE_REDUCTION_MODE_MERGE_NV + 1), VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF } VkCoverageReductionModeNV; typedef VkFlags 
VkPipelineCoverageReductionStateCreateFlagsNV; @@ -10612,9 +10537,6 @@ typedef enum VkLineRasterizationModeEXT { VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = 1, VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = 2, VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = 3, - VK_LINE_RASTERIZATION_MODE_BEGIN_RANGE_EXT = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, - VK_LINE_RASTERIZATION_MODE_END_RANGE_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT, - VK_LINE_RASTERIZATION_MODE_RANGE_SIZE_EXT = (VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT - VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT + 1), VK_LINE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF } VkLineRasterizationModeEXT; typedef struct VkPhysicalDeviceLineRasterizationFeaturesEXT { @@ -10653,6 +10575,28 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT( #endif +#define VK_EXT_shader_atomic_float 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float" +typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferFloat32Atomics; + VkBool32 shaderBufferFloat32AtomicAdd; + VkBool32 shaderBufferFloat64Atomics; + VkBool32 shaderBufferFloat64AtomicAdd; + VkBool32 shaderSharedFloat32Atomics; + VkBool32 shaderSharedFloat32AtomicAdd; + VkBool32 shaderSharedFloat64Atomics; + VkBool32 shaderSharedFloat64AtomicAdd; + VkBool32 shaderImageFloat32Atomics; + VkBool32 shaderImageFloat32AtomicAdd; + VkBool32 sparseImageFloat32Atomics; + VkBool32 sparseImageFloat32AtomicAdd; +} VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + + + #define VK_EXT_host_query_reset 1 #define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1 #define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset" @@ -10680,6 +10624,90 @@ typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesEXT { +#define VK_EXT_extended_dynamic_state 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_extended_dynamic_state" +typedef struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState; +} VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetCullModeEXT)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFaceEXT)(VkCommandBuffer commandBuffer, VkFrontFace frontFace); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopologyEXT)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2EXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOpEXT)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnableEXT)(VkCommandBuffer 
commandBuffer, VkBool32 depthBoundsTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOpEXT)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCullModeEXT( + VkCommandBuffer commandBuffer, + VkCullModeFlags cullMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFaceEXT( + VkCommandBuffer commandBuffer, + VkFrontFace frontFace); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopologyEXT( + VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2EXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOpEXT( + VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp); +#endif + + #define VK_EXT_shader_demote_to_helper_invocation 1 #define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1 #define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation" @@ -10691,6 +10719,196 @@ typedef struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT { +#define VK_NV_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) +#define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 +#define VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NV_device_generated_commands" + +typedef enum VkIndirectCommandsTokenTypeNV { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeNV; + +typedef enum VkIndirectStateFlagBitsNV { + VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV = 0x00000001, + VK_INDIRECT_STATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectStateFlagBitsNV; +typedef VkFlags VkIndirectStateFlagsNV; + +typedef enum 
VkIndirectCommandsLayoutUsageFlagBitsNV { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV = 0x00000004, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsNV; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxGraphicsShaderGroupCount; + uint32_t maxIndirectSequenceCount; + uint32_t maxIndirectCommandsTokenCount; + uint32_t maxIndirectCommandsStreamCount; + uint32_t maxIndirectCommandsTokenOffset; + uint32_t maxIndirectCommandsStreamStride; + uint32_t minSequencesCountBufferOffsetAlignment; + uint32_t minSequencesIndexBufferOffsetAlignment; + uint32_t minIndirectCommandsBufferOffsetAlignment; +} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCommands; +} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +typedef struct VkGraphicsShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; +} VkGraphicsShaderGroupCreateInfoNV; + +typedef struct VkGraphicsPipelineShaderGroupsCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t groupCount; + const VkGraphicsShaderGroupCreateInfoNV* pGroups; + uint32_t pipelineCount; + const VkPipeline* pPipelines; +} VkGraphicsPipelineShaderGroupsCreateInfoNV; + +typedef struct VkBindShaderGroupIndirectCommandNV { + uint32_t groupIndex; +} VkBindShaderGroupIndirectCommandNV; + +typedef struct VkBindIndexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + VkIndexType indexType; +} VkBindIndexBufferIndirectCommandNV; + +typedef struct VkBindVertexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + uint32_t stride; +} VkBindVertexBufferIndirectCommandNV; + +typedef struct VkSetStateFlagsIndirectCommandNV { + uint32_t data; +} VkSetStateFlagsIndirectCommandNV; + +typedef struct VkIndirectCommandsStreamNV { + VkBuffer buffer; + VkDeviceSize offset; +} VkIndirectCommandsStreamNV; + +typedef struct VkIndirectCommandsLayoutTokenNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsTokenTypeNV tokenType; + uint32_t stream; + uint32_t offset; + uint32_t vertexBindingUnit; + VkBool32 vertexDynamicStride; + VkPipelineLayout pushconstantPipelineLayout; + VkShaderStageFlags pushconstantShaderStageFlags; + uint32_t pushconstantOffset; + uint32_t pushconstantSize; + VkIndirectStateFlagsNV indirectStateFlags; + uint32_t indexTypeCount; + const VkIndexType* pIndexTypes; + const uint32_t* pIndexTypeValues; +} VkIndirectCommandsLayoutTokenNV; + +typedef struct VkIndirectCommandsLayoutCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsLayoutUsageFlagsNV flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenNV* pTokens; + uint32_t streamCount; + const uint32_t* pStreamStrides; +} VkIndirectCommandsLayoutCreateInfoNV; + +typedef struct VkGeneratedCommandsInfoNV { + VkStructureType sType; + const void* pNext; + 
VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t streamCount; + const VkIndirectCommandsStreamNV* pStreams; + uint32_t sequencesCount; + VkBuffer preprocessBuffer; + VkDeviceSize preprocessOffset; + VkDeviceSize preprocessSize; + VkBuffer sequencesCountBuffer; + VkDeviceSize sequencesCountOffset; + VkBuffer sequencesIndexBuffer; + VkDeviceSize sequencesIndexOffset; +} VkGeneratedCommandsInfoNV; + +typedef struct VkGeneratedCommandsMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t maxSequencesCount; +} VkGeneratedCommandsMemoryRequirementsInfoNV; + +typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsNV)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsNV)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsNV)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipelineShaderGroupNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNV)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNV)(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsNV( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipelineShaderGroupNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNV( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV( + VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); +#endif + + #define VK_EXT_texel_buffer_alignment 1 #define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1 #define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment" @@ -10711,10 +10929,217 @@ typedef struct VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT { +#define VK_QCOM_render_pass_transform 1 +#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 1 +#define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform" +typedef struct 
VkRenderPassTransformBeginInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkRenderPassTransformBeginInfoQCOM; + +typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; + VkRect2D renderArea; +} VkCommandBufferInheritanceRenderPassTransformInfoQCOM; + + + +#define VK_EXT_robustness2 1 +#define VK_EXT_ROBUSTNESS_2_SPEC_VERSION 1 +#define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2" +typedef struct VkPhysicalDeviceRobustness2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 robustBufferAccess2; + VkBool32 robustImageAccess2; + VkBool32 nullDescriptor; +} VkPhysicalDeviceRobustness2FeaturesEXT; + +typedef struct VkPhysicalDeviceRobustness2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize robustStorageBufferAccessSizeAlignment; + VkDeviceSize robustUniformBufferAccessSizeAlignment; +} VkPhysicalDeviceRobustness2PropertiesEXT; + + + +#define VK_EXT_custom_border_color 1 +#define VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12 +#define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color" +typedef struct VkSamplerCustomBorderColorCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkClearColorValue customBorderColor; + VkFormat format; +} VkSamplerCustomBorderColorCreateInfoEXT; + +typedef struct VkPhysicalDeviceCustomBorderColorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxCustomBorderColorSamplers; +} VkPhysicalDeviceCustomBorderColorPropertiesEXT; + +typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 customBorderColors; + VkBool32 customBorderColorWithoutFormat; +} VkPhysicalDeviceCustomBorderColorFeaturesEXT; + + + #define VK_GOOGLE_user_type 1 #define VK_GOOGLE_USER_TYPE_SPEC_VERSION 1 #define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type" + +#define VK_EXT_private_data 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPrivateDataSlotEXT) +#define VK_EXT_PRIVATE_DATA_SPEC_VERSION 1 +#define VK_EXT_PRIVATE_DATA_EXTENSION_NAME "VK_EXT_private_data" + +typedef enum VkPrivateDataSlotCreateFlagBitsEXT { + VK_PRIVATE_DATA_SLOT_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPrivateDataSlotCreateFlagBitsEXT; +typedef VkFlags VkPrivateDataSlotCreateFlagsEXT; +typedef struct VkPhysicalDevicePrivateDataFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 privateData; +} VkPhysicalDevicePrivateDataFeaturesEXT; + +typedef struct VkDevicePrivateDataCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t privateDataSlotRequestCount; +} VkDevicePrivateDataCreateInfoEXT; + +typedef struct VkPrivateDataSlotCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPrivateDataSlotCreateFlagsEXT flags; +} VkPrivateDataSlotCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlotEXT)(VkDevice device, const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot); +typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlotEXT)(VkDevice device, VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data); +typedef void (VKAPI_PTR *PFN_vkGetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t 
objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlotEXT( + VkDevice device, + const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlotEXT* pPrivateDataSlot); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlotEXT( + VkDevice device, + VkPrivateDataSlotEXT privateDataSlot, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t data); + +VKAPI_ATTR void VKAPI_CALL vkGetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t* pData); +#endif + + +#define VK_EXT_pipeline_creation_cache_control 1 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 3 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control" +typedef struct VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelineCreationCacheControl; +} VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT; + + + +#define VK_NV_device_diagnostics_config 1 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 1 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config" + +typedef enum VkDeviceDiagnosticsConfigFlagBitsNV { + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004, + VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkDeviceDiagnosticsConfigFlagBitsNV; +typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV; +typedef struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 diagnosticsConfig; +} VkPhysicalDeviceDiagnosticsConfigFeaturesNV; + +typedef struct VkDeviceDiagnosticsConfigCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceDiagnosticsConfigFlagsNV flags; +} VkDeviceDiagnosticsConfigCreateInfoNV; + + + +#define VK_QCOM_render_pass_store_ops 1 +#define VK_QCOM_render_pass_store_ops_SPEC_VERSION 2 +#define VK_QCOM_render_pass_store_ops_EXTENSION_NAME "VK_QCOM_render_pass_store_ops" + + +#define VK_EXT_fragment_density_map2 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2" +typedef struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMapDeferred; +} VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; + +typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subsampledLoads; + VkBool32 subsampledCoarseReconstructionEarlyAccess; + uint32_t maxSubsampledArrayLayers; + uint32_t maxDescriptorSetSubsampledSamplers; +} VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; + + + +#define VK_EXT_image_robustness 1 +#define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness" +typedef struct VkPhysicalDeviceImageRobustnessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 robustImageAccess; +} VkPhysicalDeviceImageRobustnessFeaturesEXT; + + + +#define 
VK_EXT_4444_formats 1 +#define VK_EXT_4444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats" +typedef struct VkPhysicalDevice4444FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 formatA4R4G4B4; + VkBool32 formatA4B4G4R4; +} VkPhysicalDevice4444FormatsFeaturesEXT; + + #ifdef __cplusplus } #endif diff --git a/thirdparty/vulkan/include/vulkan/vulkan_directfb.h b/thirdparty/vulkan/include/vulkan/vulkan_directfb.h new file mode 100644 index 00000000000..f75bd3a4e1f --- /dev/null +++ b/thirdparty/vulkan/include/vulkan/vulkan_directfb.h @@ -0,0 +1,54 @@ +#ifndef VULKAN_DIRECTFB_H_ +#define VULKAN_DIRECTFB_H_ 1 + +/* +** Copyright (c) 2015-2020 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_EXT_directfb_surface 1 +#define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1 +#define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface" +typedef VkFlags VkDirectFBSurfaceCreateFlagsEXT; +typedef struct VkDirectFBSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDirectFBSurfaceCreateFlagsEXT flags; + IDirectFB* dfb; + IDirectFBSurface* surface; +} VkDirectFBSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDirectFBSurfaceEXT)(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT( + VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB* dfb); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/thirdparty/vulkan/include/vulkan/vulkan_fuchsia.h b/thirdparty/vulkan/include/vulkan/vulkan_fuchsia.h index 81ebe55d313..03e27cb0afe 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_fuchsia.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_fuchsia.h @@ -2,19 +2,9 @@ #define VULKAN_FUCHSIA_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/thirdparty/vulkan/include/vulkan/vulkan_ggp.h b/thirdparty/vulkan/include/vulkan/vulkan_ggp.h index fd306131c32..273c88005b5 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_ggp.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_ggp.h @@ -2,19 +2,9 @@ #define VULKAN_GGP_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. 
+** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/thirdparty/vulkan/include/vulkan/vulkan_ios.h b/thirdparty/vulkan/include/vulkan/vulkan_ios.h index 72ef1a8a825..446a2698d91 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_ios.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_ios.h @@ -2,19 +2,9 @@ #define VULKAN_IOS_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* @@ -30,7 +20,7 @@ extern "C" { #define VK_MVK_ios_surface 1 -#define VK_MVK_IOS_SURFACE_SPEC_VERSION 2 +#define VK_MVK_IOS_SURFACE_SPEC_VERSION 3 #define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" typedef VkFlags VkIOSSurfaceCreateFlagsMVK; typedef struct VkIOSSurfaceCreateInfoMVK { diff --git a/thirdparty/vulkan/include/vulkan/vulkan_macos.h b/thirdparty/vulkan/include/vulkan/vulkan_macos.h index e6e5deaa366..35fcabe39b1 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_macos.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_macos.h @@ -2,19 +2,9 @@ #define VULKAN_MACOS_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. 
+** SPDX-License-Identifier: Apache-2.0 */ /* @@ -30,7 +20,7 @@ extern "C" { #define VK_MVK_macos_surface 1 -#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 2 +#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 3 #define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; typedef struct VkMacOSSurfaceCreateInfoMVK { diff --git a/thirdparty/vulkan/include/vulkan/vulkan_metal.h b/thirdparty/vulkan/include/vulkan/vulkan_metal.h index 3dec68c7713..99f097d954a 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_metal.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_metal.h @@ -2,19 +2,9 @@ #define VULKAN_METAL_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/thirdparty/vulkan/include/vulkan/vulkan_vi.h b/thirdparty/vulkan/include/vulkan/vulkan_vi.h index 6fb66f9dd28..2e62d7d3a74 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_vi.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_vi.h @@ -2,19 +2,9 @@ #define VULKAN_VI_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/thirdparty/vulkan/include/vulkan/vulkan_wayland.h b/thirdparty/vulkan/include/vulkan/vulkan_wayland.h index 599d05b24a5..f7b307e519b 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_wayland.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_wayland.h @@ -2,19 +2,9 @@ #define VULKAN_WAYLAND_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. 
+** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/thirdparty/vulkan/include/vulkan/vulkan_win32.h b/thirdparty/vulkan/include/vulkan/vulkan_win32.h index 20a1dc0e588..4b561ea10a5 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_win32.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_win32.h @@ -2,19 +2,9 @@ #define VULKAN_WIN32_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* @@ -272,9 +262,6 @@ typedef enum VkFullScreenExclusiveEXT { VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT = 1, VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT = 2, VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT = 3, - VK_FULL_SCREEN_EXCLUSIVE_BEGIN_RANGE_EXT = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT, - VK_FULL_SCREEN_EXCLUSIVE_END_RANGE_EXT = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT, - VK_FULL_SCREEN_EXCLUSIVE_RANGE_SIZE_EXT = (VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT - VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT + 1), VK_FULL_SCREEN_EXCLUSIVE_MAX_ENUM_EXT = 0x7FFFFFFF } VkFullScreenExclusiveEXT; typedef struct VkSurfaceFullScreenExclusiveInfoEXT { diff --git a/thirdparty/vulkan/include/vulkan/vulkan_xcb.h b/thirdparty/vulkan/include/vulkan/vulkan_xcb.h index 4cc0bc0cec5..c5441b239d0 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_xcb.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_xcb.h @@ -2,19 +2,9 @@ #define VULKAN_XCB_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/thirdparty/vulkan/include/vulkan/vulkan_xlib.h b/thirdparty/vulkan/include/vulkan/vulkan_xlib.h index ee2b48accb0..c54628a7ec3 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_xlib.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_xlib.h @@ -2,19 +2,9 @@ #define VULKAN_XLIB_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. 
-** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/thirdparty/vulkan/include/vulkan/vulkan_xlib_xrandr.h b/thirdparty/vulkan/include/vulkan/vulkan_xlib_xrandr.h index 08c4fd729cd..436432f8428 100644 --- a/thirdparty/vulkan/include/vulkan/vulkan_xlib_xrandr.h +++ b/thirdparty/vulkan/include/vulkan/vulkan_xlib_xrandr.h @@ -2,19 +2,9 @@ #define VULKAN_XLIB_XRANDR_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright (c) 2015-2020 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/thirdparty/vulkan/loader/dxgi_loader.c b/thirdparty/vulkan/loader/dxgi_loader.c deleted file mode 100644 index c2a3fa56385..00000000000 --- a/thirdparty/vulkan/loader/dxgi_loader.c +++ /dev/null @@ -1,23 +0,0 @@ -#include "dxgi_loader.h" - -#include - -static HMODULE load_dxgi_module() { - TCHAR systemPath[MAX_PATH] = ""; - GetSystemDirectory(systemPath, MAX_PATH); - StringCchCat(systemPath, MAX_PATH, TEXT("\\dxgi.dll")); - - return LoadLibrary(systemPath); -} - -typedef HRESULT (APIENTRY *PFN_CreateDXGIFactory1)(REFIID riid, void **ppFactory); - -HRESULT dyn_CreateDXGIFactory1(REFIID riid, void **ppFactory) { - PFN_CreateDXGIFactory1 fpCreateDXGIFactory1 = - (PFN_CreateDXGIFactory1)GetProcAddress(load_dxgi_module(), "CreateDXGIFactory1"); - - if (fpCreateDXGIFactory1 != NULL) - return fpCreateDXGIFactory1(riid, ppFactory); - - return DXGI_ERROR_NOT_FOUND; -} \ No newline at end of file diff --git a/thirdparty/vulkan/loader/dxgi_loader.h b/thirdparty/vulkan/loader/dxgi_loader.h deleted file mode 100644 index 00daf08ed26..00000000000 --- a/thirdparty/vulkan/loader/dxgi_loader.h +++ /dev/null @@ -1,8 +0,0 @@ -#ifndef DXGI_LOADER_H -#define DXGI_LOADER_H - -#include - -HRESULT dyn_CreateDXGIFactory1(REFIID riid, void **ppFactory); - -#endif \ No newline at end of file diff --git a/thirdparty/vulkan/loader/extension_manual.c b/thirdparty/vulkan/loader/extension_manual.c index b8118fdc8a6..fab25813f53 100644 --- a/thirdparty/vulkan/loader/extension_manual.c +++ b/thirdparty/vulkan/loader/extension_manual.c @@ -37,78 +37,6 @@ // These functions, for whatever reason, require more complex changes than // can easily be automatically generated. 
-// ---- VK_KHR_device_group extension trampoline/terminators - -VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, - VkSurfaceCapabilities2KHR *pSurfaceCapabilities) { - const VkLayerInstanceDispatchTable *disp; - VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice); - disp = loader_get_instance_layer_dispatch(physicalDevice); - return disp->GetPhysicalDeviceSurfaceCapabilities2KHR(unwrapped_phys_dev, pSurfaceInfo, pSurfaceCapabilities); -} - -VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceCapabilities2KHR( - VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, - VkSurfaceCapabilities2KHR *pSurfaceCapabilities) { - struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice; - struct loader_icd_term *icd_term = phys_dev_term->this_icd_term; - - VkIcdSurface *icd_surface = (VkIcdSurface *)(pSurfaceInfo->surface); - uint8_t icd_index = phys_dev_term->icd_index; - - if (icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilities2KHR != NULL) { - VkBaseOutStructure *pNext = (VkBaseOutStructure *)pSurfaceCapabilities->pNext; - while (pNext != NULL) { - if ((int)pNext->sType == VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR) { - // Not all ICDs may be supporting VK_KHR_surface_protected_capabilities - // Initialize VkSurfaceProtectedCapabilitiesKHR.supportsProtected to false and - // if an ICD supports protected surfaces, it will reset it to true accordingly. - ((VkSurfaceProtectedCapabilitiesKHR *)pNext)->supportsProtected = VK_FALSE; - } - pNext = (VkBaseOutStructure *)pNext->pNext; - } - - // Pass the call to the driver, possibly unwrapping the ICD surface - if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) { - VkPhysicalDeviceSurfaceInfo2KHR info_copy = *pSurfaceInfo; - info_copy.surface = icd_surface->real_icd_surfaces[icd_index]; - return icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilities2KHR(phys_dev_term->phys_dev, &info_copy, - pSurfaceCapabilities); - } else { - return icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilities2KHR(phys_dev_term->phys_dev, pSurfaceInfo, - pSurfaceCapabilities); - } - } else { - // Emulate the call - loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, - "vkGetPhysicalDeviceSurfaceCapabilities2KHR: Emulating call in ICD \"%s\" using " - "vkGetPhysicalDeviceSurfaceCapabilitiesKHR", - icd_term->scanned_icd->lib_name); - - if (pSurfaceInfo->pNext != NULL) { - loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, - "vkGetPhysicalDeviceSurfaceCapabilities2KHR: Emulation found unrecognized structure type in " - "pSurfaceInfo->pNext - this struct will be ignored"); - } - - // Write to the VkSurfaceCapabilities2KHR struct - VkSurfaceKHR surface = pSurfaceInfo->surface; - if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) { - surface = icd_surface->real_icd_surfaces[icd_index]; - } - VkResult res = icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilitiesKHR(phys_dev_term->phys_dev, surface, - &pSurfaceCapabilities->surfaceCapabilities); - - if (pSurfaceCapabilities->pNext != NULL) { - loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, - "vkGetPhysicalDeviceSurfaceCapabilities2KHR: Emulation found unrecognized structure type in " - "pSurfaceCapabilities->pNext 
- this struct will be ignored"); - } - return res; - } -} - // ---- VK_NV_external_memory_capabilities extension trampoline/terminators VKAPI_ATTR VkResult VKAPI_CALL @@ -159,81 +87,6 @@ terminator_GetPhysicalDeviceExternalImageFormatPropertiesNV( externalHandleType, pExternalImageFormatProperties); } -VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, - uint32_t *pSurfaceFormatCount, - VkSurfaceFormat2KHR *pSurfaceFormats) { - const VkLayerInstanceDispatchTable *disp; - VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice); - disp = loader_get_instance_layer_dispatch(physicalDevice); - return disp->GetPhysicalDeviceSurfaceFormats2KHR(unwrapped_phys_dev, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats); -} - -VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, - uint32_t *pSurfaceFormatCount, - VkSurfaceFormat2KHR *pSurfaceFormats) { - struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice; - struct loader_icd_term *icd_term = phys_dev_term->this_icd_term; - - VkIcdSurface *icd_surface = (VkIcdSurface *)(pSurfaceInfo->surface); - uint8_t icd_index = phys_dev_term->icd_index; - - if (icd_term->dispatch.GetPhysicalDeviceSurfaceFormats2KHR != NULL) { - // Pass the call to the driver, possibly unwrapping the ICD surface - if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) { - VkPhysicalDeviceSurfaceInfo2KHR info_copy = *pSurfaceInfo; - info_copy.surface = icd_surface->real_icd_surfaces[icd_index]; - return icd_term->dispatch.GetPhysicalDeviceSurfaceFormats2KHR(phys_dev_term->phys_dev, &info_copy, pSurfaceFormatCount, - pSurfaceFormats); - } else { - return icd_term->dispatch.GetPhysicalDeviceSurfaceFormats2KHR(phys_dev_term->phys_dev, pSurfaceInfo, - pSurfaceFormatCount, pSurfaceFormats); - } - } else { - // Emulate the call - loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, - "vkGetPhysicalDeviceSurfaceFormats2KHR: Emulating call in ICD \"%s\" using vkGetPhysicalDeviceSurfaceFormatsKHR", - icd_term->scanned_icd->lib_name); - - if (pSurfaceInfo->pNext != NULL) { - loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, - "vkGetPhysicalDeviceSurfaceFormats2KHR: Emulation found unrecognized structure type in pSurfaceInfo->pNext " - "- this struct will be ignored"); - } - - VkSurfaceKHR surface = pSurfaceInfo->surface; - if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) { - surface = icd_surface->real_icd_surfaces[icd_index]; - } - - if (*pSurfaceFormatCount == 0 || pSurfaceFormats == NULL) { - // Write to pSurfaceFormatCount - return icd_term->dispatch.GetPhysicalDeviceSurfaceFormatsKHR(phys_dev_term->phys_dev, surface, pSurfaceFormatCount, - NULL); - } else { - // Allocate a temporary array for the output of the old function - VkSurfaceFormatKHR *formats = loader_stack_alloc(*pSurfaceFormatCount * sizeof(VkSurfaceFormatKHR)); - if (formats == NULL) { - return VK_ERROR_OUT_OF_HOST_MEMORY; - } - - VkResult res = icd_term->dispatch.GetPhysicalDeviceSurfaceFormatsKHR(phys_dev_term->phys_dev, surface, - pSurfaceFormatCount, formats); - for (uint32_t i = 0; i < *pSurfaceFormatCount; ++i) { - pSurfaceFormats[i].surfaceFormat = formats[i]; - if 
(pSurfaceFormats[i].pNext != NULL) { - loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, - "vkGetPhysicalDeviceSurfaceFormats2KHR: Emulation found unrecognized structure type in " - "pSurfaceFormats[%d].pNext - this struct will be ignored", - i); - } - } - return res; - } - } -} - // ---- VK_EXT_display_surface_counter extension trampoline/terminators VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, diff --git a/thirdparty/vulkan/loader/extension_manual.h b/thirdparty/vulkan/loader/extension_manual.h index fe4287effb5..b60a799eb2e 100644 --- a/thirdparty/vulkan/loader/extension_manual.h +++ b/thirdparty/vulkan/loader/extension_manual.h @@ -39,24 +39,6 @@ terminator_GetPhysicalDeviceExternalImageFormatPropertiesNV( VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV *pExternalImageFormatProperties); -VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - VkSurfaceCapabilities2KHR* pSurfaceCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceCapabilities2KHR( - VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - VkSurfaceCapabilities2KHR* pSurfaceCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - uint32_t* pSurfaceFormatCount, - VkSurfaceFormat2KHR* pSurfaceFormats); - -VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - uint32_t* pSurfaceFormatCount, - VkSurfaceFormat2KHR* pSurfaceFormats); - VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); diff --git a/thirdparty/vulkan/loader/gpa_helper.h b/thirdparty/vulkan/loader/gpa_helper.h index 832b68c3efa..01dec6fa727 100644 --- a/thirdparty/vulkan/loader/gpa_helper.h +++ b/thirdparty/vulkan/loader/gpa_helper.h @@ -229,6 +229,7 @@ static inline void *globalGetProcAddr(const char *name) { if (!strcmp(name, "EnumerateInstanceExtensionProperties")) return vkEnumerateInstanceExtensionProperties; if (!strcmp(name, "EnumerateInstanceLayerProperties")) return vkEnumerateInstanceLayerProperties; if (!strcmp(name, "EnumerateInstanceVersion")) return vkEnumerateInstanceVersion; + if (!strcmp(name, "GetInstanceProcAddr")) return vkGetInstanceProcAddr; return NULL; } diff --git a/thirdparty/vulkan/loader/loader.c b/thirdparty/vulkan/loader/loader.c index c7cdb471229..418b88e30ae 100644 --- a/thirdparty/vulkan/loader/loader.c +++ b/thirdparty/vulkan/loader/loader.c @@ -70,8 +70,12 @@ #include #include #include +#include +#include #include "adapters.h" -#include "dxgi_loader.h" + +typedef HRESULT (APIENTRY *PFN_CreateDXGIFactory1)(REFIID riid, void **ppFactory); +static PFN_CreateDXGIFactory1 fpCreateDXGIFactory1; #endif // This is a CMake generated file with #defines for any functions/includes @@ -84,6 +88,10 @@ // Generated file containing all the extension data #include "vk_loader_extensions.c" +// Environment Variable information +#define VK_ICD_FILENAMES_ENV_VAR "VK_ICD_FILENAMES" +#define VK_LAYER_PATH_ENV_VAR "VK_LAYER_PATH" + // Override layer information #define VK_OVERRIDE_LAYER_NAME 
"VK_LAYER_LUNARG_override" @@ -121,13 +129,17 @@ enum loader_data_files_type { // additionally CreateDevice and DestroyDevice needs to be locked loader_platform_thread_mutex loader_lock; loader_platform_thread_mutex loader_json_lock; +loader_platform_thread_mutex loader_preload_icd_lock; + +// A list of ICDs that gets initialized when the loader does its global initialization. This list should never be used by anything +// other than EnumerateInstanceExtensionProperties(), vkDestroyInstance, and loader_release(). This list does not change +// functionality, but the fact that the libraries already been loaded causes any call that needs to load ICD libraries to speed up +// significantly. This can have a huge impact when making repeated calls to vkEnumerateInstanceExtensionProperties and +// vkCreateInstance. +static struct loader_icd_tramp_list scanned_icds; LOADER_PLATFORM_THREAD_ONCE_DECLARATION(once_init); -// This loader supports Vulkan API version 1.1 -uint32_t loader_major_version = 1; -uint32_t loader_minor_version = 2; - void *loader_instance_heap_alloc(const struct loader_instance *instance, size_t size, VkSystemAllocationScope alloc_scope) { void *pMemory = NULL; #if (DEBUG_DISABLE_APP_ALLOCATORS == 1) @@ -400,7 +412,7 @@ void loader_log(const struct loader_instance *inst, VkFlags msg_type, int32_t ms } else if ((msg_type & LOADER_ERROR_BIT) != 0) { severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT; } else if ((msg_type & LOADER_DEBUG_BIT) != 0) { - severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT; + severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT; } if ((msg_type & LOADER_PERF_BIT) != 0) { @@ -876,7 +888,7 @@ VkResult loaderGetRegistryFiles(const struct loader_instance *inst, char *locati } if (is_driver) { - HRESULT hres = dyn_CreateDXGIFactory1(&IID_IDXGIFactory1, &dxgi_factory); + HRESULT hres = fpCreateDXGIFactory1(&IID_IDXGIFactory1, (void **)&dxgi_factory); if (hres != S_OK) { loader_log( inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, @@ -1078,11 +1090,11 @@ static size_t loader_platform_combine_path(char *dest, size_t len, ...) { // Given string of three part form "maj.min.pat" convert to a vulkan version number. 
static uint32_t loader_make_version(char *vers_str) { - uint32_t vers = 0, major = 0, minor = 0, patch = 0; + uint32_t major = 0, minor = 0, patch = 0; char *vers_tok; if (!vers_str) { - return vers; + return 0; } vers_tok = strtok(vers_str, ".\"\n\r"); @@ -1230,6 +1242,10 @@ void loaderDeleteLayerListAndProperties(const struct loader_instance *inst, stru loader_instance_heap_free(inst, layer_list->list[i].override_paths); layer_list->list[i].override_paths = NULL; } + if (NULL != layer_list->list[i].app_key_paths) { + loader_instance_heap_free(inst, layer_list->list[i].app_key_paths); + layer_list->list[i].app_key_paths = NULL; + } loader_destroy_generic_list(inst, (struct loader_generic_list *)&layer_list->list[i].instance_extension_list); dev_ext_list = &layer_list->list[i].device_extension_list; if (dev_ext_list->capacity > 0 && NULL != dev_ext_list->list) { @@ -1253,6 +1269,31 @@ void loaderDeleteLayerListAndProperties(const struct loader_instance *inst, stru } } +void loaderRemoveLayerInList(const struct loader_instance *inst, struct loader_layer_list *layer_list, uint32_t layer_to_remove) { + if (layer_list == NULL || layer_to_remove >= layer_list->count) { + return; + } + if (layer_list->list[layer_to_remove].type_flags & VK_LAYER_TYPE_FLAG_META_LAYER) { + // Delete the component layers + loader_instance_heap_free(inst, layer_list->list[layer_to_remove].component_layer_names); + loader_instance_heap_free(inst, layer_list->list[layer_to_remove].override_paths); + loader_instance_heap_free(inst, layer_list->list[layer_to_remove].blacklist_layer_names); + loader_instance_heap_free(inst, layer_list->list[layer_to_remove].app_key_paths); + } + + // Remove the current invalid meta-layer from the layer list. Use memmove since we are + // overlapping the source and destination addresses. + memmove(&layer_list->list[layer_to_remove], &layer_list->list[layer_to_remove + 1], + sizeof(struct loader_layer_properties) * (layer_list->count - 1 - layer_to_remove)); + + // Make sure to clear out the removed layer, in case new layers are added in the previous location + memset(&layer_list->list[layer_list->count - 1], 0, sizeof(struct loader_layer_properties)); + + // Decrement the count (because we now have one less) and decrement the loop index since we need to + // re-check this index. + layer_list->count--; +} + // Remove all layers in the layer list that are blacklisted by the override layer. // NOTE: This should only be called if an override layer is found and not expired. void loaderRemoveLayersInBlacklist(const struct loader_instance *inst, struct loader_layer_list *layer_list) { @@ -1925,9 +1966,10 @@ bool loaderAddMetaLayer(const struct loader_instance *inst, const struct loader_ // Search the source_list for any layer with a name that matches the given name and a type // that matches the given type. Add all matching layers to the target_list. // Do not add if found loader_layer_properties is already on the target_list. 
-void loaderAddLayerNameToList(const struct loader_instance *inst, const char *name, const enum layer_type_flags type_flags, - const struct loader_layer_list *source_list, struct loader_layer_list *target_list, - struct loader_layer_list *expanded_target_list) { +VkResult loaderAddLayerNameToList(const struct loader_instance *inst, const char *name, const enum layer_type_flags type_flags, + const struct loader_layer_list *source_list, struct loader_layer_list *target_list, + struct loader_layer_list *expanded_target_list) { + VkResult res = VK_SUCCESS; bool found = false; for (uint32_t i = 0; i < source_list->count; i++) { struct loader_layer_properties *source_prop = &source_list->list[i]; @@ -1948,9 +1990,17 @@ void loaderAddLayerNameToList(const struct loader_instance *inst, const char *na } } if (!found) { - loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "loaderAddLayerNameToList: Failed to find layer name %s to activate", - name); + if (strcmp(name, "VK_LAYER_LUNARG_standard_validation")) { + loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, + "loaderAddLayerNameToList: Failed to find layer name %s to activate", name); + } else { + res = VK_ERROR_LAYER_NOT_PRESENT; + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Layer VK_LAYER_LUNARG_standard_validation has been changed to VK_LAYER_KHRONOS_validation. Please use the " + "new version of the layer."); + } } + return res; } static VkExtensionProperties *get_extension_property(const char *name, const struct loader_extension_list *list) { @@ -2251,6 +2301,9 @@ static VkResult loader_scanned_icd_add(const struct loader_instance *inst, struc PFN_vkGetInstanceProcAddr fp_get_proc_addr; PFN_GetPhysicalDeviceProcAddr fp_get_phys_dev_proc_addr = NULL; PFN_vkNegotiateLoaderICDInterfaceVersion fp_negotiate_icd_version; +#if defined(VK_USE_PLATFORM_WIN32_KHR) + PFN_vk_icdEnumerateAdapterPhysicalDevices fp_enum_dxgi_adapter_phys_devs = NULL; +#endif struct loader_scanned_icd *new_scanned_icd; uint32_t interface_vers; VkResult res = VK_SUCCESS; @@ -2337,6 +2390,11 @@ static VkResult loader_scanned_icd_add(const struct loader_instance *inst, struc goto out; } fp_get_phys_dev_proc_addr = loader_platform_get_proc_address(handle, "vk_icdGetPhysicalDeviceProcAddr"); +#if defined(VK_USE_PLATFORM_WIN32_KHR) + if (interface_vers >= 6) { + fp_enum_dxgi_adapter_phys_devs = loader_platform_get_proc_address(handle, "vk_icdEnumerateAdapterPhysicalDevices"); + } +#endif } // check for enough capacity @@ -2362,6 +2420,9 @@ static VkResult loader_scanned_icd_add(const struct loader_instance *inst, struc new_scanned_icd->GetPhysicalDeviceProcAddr = fp_get_phys_dev_proc_addr; new_scanned_icd->EnumerateInstanceExtensionProperties = fp_get_inst_ext_props; new_scanned_icd->CreateInstance = fp_create_inst; +#if defined(VK_USE_PLATFORM_WIN32_KHR) + new_scanned_icd->EnumerateAdapterPhysicalDevices = fp_enum_dxgi_adapter_phys_devs; +#endif new_scanned_icd->interface_version = interface_vers; new_scanned_icd->lib_name = (char *)loader_instance_heap_alloc(inst, strlen(filename) + 1, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); @@ -2430,7 +2491,7 @@ void loader_initialize(void) { // initialize mutexes loader_platform_thread_create_mutex(&loader_lock); loader_platform_thread_create_mutex(&loader_json_lock); - + loader_platform_thread_create_mutex(&loader_preload_icd_lock); // initialize logging loader_debug_init(); @@ -2444,6 +2505,13 @@ void loader_initialize(void) { // This is needed to ensure that newer APIs are available right away // and not after the first call 
that has been statically linked
     LoadLibrary("gdi32.dll");
+
+    TCHAR systemPath[MAX_PATH] = "";
+    GetSystemDirectory(systemPath, MAX_PATH);
+    StringCchCat(systemPath, MAX_PATH, TEXT("\\dxgi.dll"));
+    HMODULE dxgi_module = LoadLibrary(systemPath);
+    fpCreateDXGIFactory1 = dxgi_module == NULL ? NULL :
+        (PFN_CreateDXGIFactory1)GetProcAddress(dxgi_module, "CreateDXGIFactory1");
 #endif
 }
@@ -2454,9 +2522,38 @@ struct loader_data_files {
 };

 void loader_release() {
+    // Guarantee release of the preloaded ICD libraries. This may have already been called in vkDestroyInstance.
+    loader_unload_preloaded_icds();
+
     // release mutexes
     loader_platform_thread_delete_mutex(&loader_lock);
     loader_platform_thread_delete_mutex(&loader_json_lock);
+    loader_platform_thread_delete_mutex(&loader_preload_icd_lock);
+}
+
+// Preload the ICD libraries that are likely to be needed so we don't repeatedly load/unload them later
+void loader_preload_icds(void) {
+    loader_platform_thread_lock_mutex(&loader_preload_icd_lock);
+
+    // Already preloaded, skip loading again.
+    if (scanned_icds.scanned_list != NULL) {
+        loader_platform_thread_unlock_mutex(&loader_preload_icd_lock);
+        return;
+    }
+
+    memset(&scanned_icds, 0, sizeof(scanned_icds));
+    VkResult result = loader_icd_scan(NULL, &scanned_icds);
+    if (result != VK_SUCCESS) {
+        loader_scanned_icd_clear(NULL, &scanned_icds);
+    }
+    loader_platform_thread_unlock_mutex(&loader_preload_icd_lock);
+}
+
+// Release the ICD libraries that were preloaded
+void loader_unload_preloaded_icds(void) {
+    loader_platform_thread_lock_mutex(&loader_preload_icd_lock);
+    loader_scanned_icd_clear(NULL, &scanned_icds);
+    loader_platform_thread_unlock_mutex(&loader_preload_icd_lock);
 }

 // Get next file or dirname given a string list or registry key path
@@ -2562,6 +2659,11 @@ static VkResult loader_get_json(const struct loader_instance *inst, const char *
     }
     json_buf[len] = '\0';

+    // Can't be a valid json if the string is of length zero
+    if (len == 0) {
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
     // Parse text from file
     *json = cJSON_Parse(json_buf);
     if (*json == NULL) {
@@ -2711,6 +2813,9 @@ static void VerifyAllMetaLayers(struct loader_instance *inst, struct loader_laye
         if (prop->blacklist_layer_names != NULL) {
             loader_instance_heap_free(inst, prop->blacklist_layer_names);
         }
+        if (prop->override_paths != NULL) {
+            loader_instance_heap_free(inst, prop->override_paths);
+        }

         // Remove the current invalid meta-layer from the layer list. Use memmove since we are
         // overlapping the source and destination addresses.
@@ -2727,6 +2832,75 @@ static void VerifyAllMetaLayers(struct loader_instance *inst, struct loader_laye
     }
 }

+// If the current working directory matches any app_key_path of the layers, remove all other override layers.
+// Otherwise if no matching app_key was found, remove all but the global override layer, which has no app_key_path.
+static void RemoveAllNonValidOverrideLayers(struct loader_instance *inst, struct loader_layer_list *instance_layers) {
+    if (instance_layers == NULL) {
+        return;
+    }
+
+    char cur_path[MAX_STRING_SIZE];
+    char *ret = loader_platform_executable_path(cur_path, sizeof(cur_path));
+    if (ret == NULL) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "RemoveAllNonValidOverrideLayers: Failed to get executable path and name");
+        return;
+    }
+
+    // Find out if there is an override layer with the same app_key_path as the path to the current executable.
+    // If more than one is found, remove it and use the first layer
+    // Remove any layers which aren't global and do not have the same app_key_path as the path to the current executable.
+    bool found_active_override_layer = false;
+    int global_layer_index = -1;
+    for (uint32_t i = 0; i < instance_layers->count; i++) {
+        struct loader_layer_properties *props = &instance_layers->list[i];
+        if (strcmp(props->info.layerName, VK_OVERRIDE_LAYER_NAME) == 0) {
+            if (props->num_app_key_paths > 0) {  // not the global layer
+                for (uint32_t j = 0; j < props->num_app_key_paths; j++) {
+                    if (strcmp(props->app_key_paths[j], cur_path) == 0) {
+                        if (!found_active_override_layer) {
+                            found_active_override_layer = true;
+                        } else {
+                            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                                       "RemoveAllNonValidOverrideLayers: Multiple override layers where the same "
+                                       "path in app_keys was found. Using the first layer found");

+                            // Remove duplicate active override layers that have the same app_key_path
+                            loaderRemoveLayerInList(inst, instance_layers, i);
+                            i--;
+                        }
+                    }
+                }
+                if (!found_active_override_layer) {
+                    // Remove non-global override layers that don't have an app_key that matches cur_path
+                    loaderRemoveLayerInList(inst, instance_layers, i);
+                    i--;
+                }
+            } else {
+                if (global_layer_index == -1) {
+                    global_layer_index = i;
+                } else {
+                    loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "RemoveAllNonValidOverrideLayers: Multiple global override layers "
+                               "found. Using the first global layer found");
+                    loaderRemoveLayerInList(inst, instance_layers, i);
+                    i--;
+                }
+            }
+        }
+    }
+    // Remove global layer if layer with the same app_key_path as the path to the current executable is found
+    if (found_active_override_layer && global_layer_index >= 0) {
+        loaderRemoveLayerInList(inst, instance_layers, global_layer_index);
+    }
+    // Should be at most 1 override layer in the list now.
+    if (found_active_override_layer) {
+        loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "Using the override layer for app key %s", cur_path);
+    } else if (global_layer_index >= 0) {
+        loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "Using the global override layer");
+    }
+}
+
 // This structure is used to store the json file version
 // in a more manageable way.
 typedef struct {
@@ -3128,6 +3302,7 @@ static VkResult loaderReadLayerJson(const struct loader_instance *inst, struct l
     char *vkNegotiateLoaderLayerInterfaceVersion = NULL;
     char *spec_version = NULL;
     char **entry_array = NULL;
+    cJSON *app_keys = NULL;

     // Layer interface functions
     //    vkGetInstanceProcAddr
@@ -3316,6 +3491,45 @@ static VkResult loaderReadLayerJson(const struct loader_instance *inst, struct l
         }
     }

+    props->num_app_key_paths = 0;
+    props->app_key_paths = NULL;
+    app_keys = cJSON_GetObjectItem(layer_node, "app_keys");
+    if (app_keys != NULL) {
+        if (strcmp(name, VK_OVERRIDE_LAYER_NAME)) {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "Layer %s contains app_keys, but any app_keys can only be provided by the override metalayer. "
+                       "These will be ignored.",
+                       name);
+        } else {
+            props->num_app_key_paths = cJSON_GetArraySize(app_keys);
+
+            // Allocate the app_key_paths array
+            props->app_key_paths = loader_instance_heap_alloc(inst, sizeof(char[MAX_STRING_SIZE]) * props->num_app_key_paths,
+                                                              VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+            if (props->app_key_paths == NULL) {
+                result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                goto out;
+            }
+
+            // Copy the app_key_paths into the array
+            for (i = 0; i < (int)props->num_app_key_paths; ++i) {
+                cJSON *app_key_path = cJSON_GetArrayItem(app_keys, i);
+                if (app_key_path == NULL) {
+                    continue;
+                }
+                temp = cJSON_Print(app_key_path);
+                if (temp == NULL) {
+                    result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                    goto out;
+                }
+                temp[strlen(temp) - 1] = '\0';
+                strncpy(props->app_key_paths[i], temp + 1, MAX_STRING_SIZE - 1);
+                props->app_key_paths[i][MAX_STRING_SIZE - 1] = '\0';
+                cJSON_Free(temp);
+            }
+        }
+    }
+
     result = VK_SUCCESS;

 out:
@@ -3332,12 +3546,17 @@ out:
         if (NULL != props->override_paths) {
             loader_instance_heap_free(inst, props->override_paths);
         }
+        if (NULL != props->app_key_paths) {
+            loader_instance_heap_free(inst, props->app_key_paths);
+        }
         props->num_blacklist_layers = 0;
         props->blacklist_layer_names = NULL;
         props->num_component_layers = 0;
         props->component_layer_names = NULL;
         props->num_override_paths = 0;
         props->override_paths = NULL;
+        props->num_app_key_paths = 0;
+        props->app_key_paths = NULL;
     }

     return result;
@@ -3380,6 +3599,10 @@ static VkResult loaderAddLayerProperties(const struct loader_instance *inst, str
     layer_json_version json_version = {0, 0, 0};
     char *vers_tok;
     cJSON *disable_environment = NULL;
+    // Make sure the top level json value is an object
+    if (!json || json->type != 6) {
+        goto out;
+    }
     item = cJSON_GetObjectItem(json, "file_format_version");
     if (item == NULL) {
         goto out;
     }
@@ -3596,7 +3819,7 @@ out:
 }

 static VkResult AddDataFilesInPath(const struct loader_instance *inst, char *search_path, bool is_directory_list,
-                                   struct loader_data_files *out_files) {
+                                   struct loader_data_files *out_files, bool use_first_found_manifest) {
     VkResult vk_result = VK_SUCCESS;
     DIR *dir_stream = NULL;
     struct dirent *dir_entry;
@@ -3678,6 +3901,9 @@ static VkResult AddDataFilesInPath(const struct loader_instance *inst, char *sea
                 break;
             }
         }
+        if (use_first_found_manifest && out_files->count > 0) {
+            break;
+        }
     }

 out:
@@ -3699,6 +3925,7 @@ static VkResult ReadDataFilesInSearchPaths(const struct loader_instance *inst, e
     char *search_path = NULL;
     char *cur_path_ptr = NULL;
     size_t rel_size = 0;
+    bool use_first_found_manifest = false;
 #ifndef _WIN32
     bool xdgconfig_alloc = true;
     bool xdgdata_alloc = true;
@@ -3828,6 +4055,10 @@ static VkResult ReadDataFilesInSearchPaths(const struct loader_instance *inst, e
                 memcpy(cur_path_ptr, relative_location, rel_size);
                 cur_path_ptr += rel_size;
                 *cur_path_ptr++ = PATH_SEPARATOR;
+                // only for ICD manifests
+                if (env_override != NULL && strcmp(VK_ICD_FILENAMES_ENV_VAR, env_override) == 0) {
+                    use_first_found_manifest = true;
+                }
             }
             CFRelease(ref);
         }
@@ -3853,6 +4084,39 @@ static VkResult ReadDataFilesInSearchPaths(const struct loader_instance *inst, e
 #endif
     }

+    // Remove duplicate paths, or it would result in duplicate extensions, duplicate devices, etc.
+    // This uses minimal memory, but is O(N^2) on the number of paths. Expect only a few paths.
+ char path_sep_str[2] = {PATH_SEPARATOR, '\0'}; + size_t search_path_updated_size = strlen(search_path); + for (size_t first = 0; first < search_path_updated_size;) { + // If this is an empty path, erase it + if (search_path[first] == PATH_SEPARATOR) { + memmove(&search_path[first], &search_path[first + 1], search_path_updated_size - first + 1); + search_path_updated_size -= 1; + continue; + } + + size_t first_end = first + 1; + first_end += strcspn(&search_path[first_end], path_sep_str); + for (size_t second = first_end + 1; second < search_path_updated_size;) { + size_t second_end = second + 1; + second_end += strcspn(&search_path[second_end], path_sep_str); + if (first_end - first == second_end - second && + !strncmp(&search_path[first], &search_path[second], second_end - second)) { + // Found duplicate. Include PATH_SEPARATOR in second_end, then erase it from search_path. + if (search_path[second_end] == PATH_SEPARATOR) { + second_end++; + } + memmove(&search_path[second], &search_path[second_end], search_path_updated_size - second_end + 1); + search_path_updated_size -= second_end - second; + } else { + second = second_end + 1; + } + } + first = first_end + 1; + } + search_path_size = search_path_updated_size; + // Print out the paths being searched if debugging is enabled if (search_path_size > 0) { loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, @@ -3860,7 +4124,7 @@ static VkResult ReadDataFilesInSearchPaths(const struct loader_instance *inst, e } // Now, parse the paths and add any manifest files found in them. - vk_result = AddDataFilesInPath(inst, search_path, is_directory_list, out_files); + vk_result = AddDataFilesInPath(inst, search_path, is_directory_list, out_files, use_first_found_manifest); if (NULL != override_path) { *override_active = true; @@ -4096,7 +4360,7 @@ static VkResult ReadDataFilesInRegistry(const struct loader_instance *inst, enum } // Now, parse the paths and add any manifest files found in them. - vk_result = AddDataFilesInPath(inst, search_path, false, out_files); + vk_result = AddDataFilesInPath(inst, search_path, false, out_files, false); out: @@ -4216,14 +4480,12 @@ VkResult loader_icd_scan(const struct loader_instance *inst, struct loader_icd_t if (VK_SUCCESS != res) { goto out; } - // Get a list of manifest files for ICDs - res = loaderGetDataFiles(inst, LOADER_DATA_FILE_MANIFEST_ICD, true, "VK_ICD_FILENAMES", NULL, VK_DRIVERS_INFO_REGISTRY_LOC, - VK_DRIVERS_INFO_RELATIVE_DIR, &manifest_files); + res = loaderGetDataFiles(inst, LOADER_DATA_FILE_MANIFEST_ICD, true, VK_ICD_FILENAMES_ENV_VAR, NULL, + VK_DRIVERS_INFO_REGISTRY_LOC, VK_DRIVERS_INFO_RELATIVE_DIR, &manifest_files); if (VK_SUCCESS != res || manifest_files.count == 0) { goto out; } - loader_platform_thread_lock_mutex(&loader_json_lock); lockedMutex = true; for (uint32_t i = 0; i < manifest_files.count; i++) { @@ -4244,6 +4506,7 @@ VkResult loader_icd_scan(const struct loader_instance *inst, struct loader_icd_t res = temp_res; } if (temp_res == VK_ERROR_OUT_OF_HOST_MEMORY) { + res = VK_ERROR_OUT_OF_HOST_MEMORY; break; } else { continue; @@ -4486,12 +4749,16 @@ void loaderScanForLayers(struct loader_instance *inst, struct loader_layer_list VkResult local_res = loaderAddLayerProperties(inst, instance_layers, json, true, file_str); cJSON_Delete(json); - if (VK_SUCCESS != local_res) { + // If the error is anything other than out of memory we still want to try to load the other layers + if (VK_ERROR_OUT_OF_HOST_MEMORY == local_res) { goto out; } } } + // Remove any extraneous override layers. 
+ RemoveAllNonValidOverrideLayers(inst, instance_layers); + // Check to see if the override layer is present, and use it's override paths. for (int32_t i = 0; i < (int32_t)instance_layers->count; i++) { struct loader_layer_properties *prop = &instance_layers->list[i]; @@ -4519,7 +4786,7 @@ void loaderScanForLayers(struct loader_instance *inst, struct loader_layer_list } // Get a list of manifest files for explicit layers - if (VK_SUCCESS != loaderGetDataFiles(inst, LOADER_DATA_FILE_MANIFEST_LAYER, true, "VK_LAYER_PATH", override_paths, + if (VK_SUCCESS != loaderGetDataFiles(inst, LOADER_DATA_FILE_MANIFEST_LAYER, true, VK_LAYER_PATH_ENV_VAR, override_paths, VK_ELAYERS_INFO_REGISTRY_LOC, VK_ELAYERS_INFO_RELATIVE_DIR, &manifest_files)) { goto out; } @@ -4631,6 +4898,9 @@ void loaderScanForImplicitLayers(struct loader_instance *inst, struct loader_lay } } + // Remove any extraneous override layers. + RemoveAllNonValidOverrideLayers(inst, instance_layers); + // Check to see if either the override layer is present, or another implicit meta-layer. // Each of these may require explicit layers to be enabled at this time. for (int32_t i = 0; i < (int32_t)instance_layers->count; i++) { @@ -4667,7 +4937,7 @@ void loaderScanForImplicitLayers(struct loader_instance *inst, struct loader_lay // explicit layer info as well. Not to worry, though, all explicit layers not included // in the override layer will be removed below in loaderRemoveLayersInBlacklist(). if (override_layer_valid || implicit_metalayer_present) { - if (VK_SUCCESS != loaderGetDataFiles(inst, LOADER_DATA_FILE_MANIFEST_LAYER, true, "VK_LAYER_PATH", override_paths, + if (VK_SUCCESS != loaderGetDataFiles(inst, LOADER_DATA_FILE_MANIFEST_LAYER, true, VK_LAYER_PATH_ENV_VAR, override_paths, VK_ELAYERS_INFO_REGISTRY_LOC, VK_ELAYERS_INFO_RELATIVE_DIR, &manifest_files)) { goto out; } @@ -4689,6 +4959,7 @@ void loaderScanForImplicitLayers(struct loader_instance *inst, struct loader_lay res = loaderAddLayerProperties(inst, instance_layers, json, true, file_str); loader_instance_heap_free(inst, file_str); + manifest_files.filename_list[i] = NULL; cJSON_Delete(json); if (VK_ERROR_OUT_OF_HOST_MEMORY == res) { @@ -4713,6 +4984,11 @@ out: if (NULL != override_paths) { loader_instance_heap_free(inst, override_paths); } + for (uint32_t i = 0; i < manifest_files.count; i++) { + if (NULL != manifest_files.filename_list[i]) { + loader_instance_heap_free(inst, manifest_files.filename_list[i]); + } + } if (NULL != manifest_files.filename_list) { loader_instance_heap_free(inst, manifest_files.filename_list); } @@ -5349,9 +5625,11 @@ static void loaderAddImplicitLayers(const struct loader_instance *inst, struct l // Get the layer name(s) from the env_name environment variable. If layer is found in // search_list then add it to layer_list. But only add it to layer_list if type_flags matches. 
-static void loaderAddEnvironmentLayers(struct loader_instance *inst, const enum layer_type_flags type_flags, const char *env_name, - struct loader_layer_list *target_list, struct loader_layer_list *expanded_target_list, - const struct loader_layer_list *source_list) { +static VkResult loaderAddEnvironmentLayers(struct loader_instance *inst, const enum layer_type_flags type_flags, + const char *env_name, struct loader_layer_list *target_list, + struct loader_layer_list *expanded_target_list, + const struct loader_layer_list *source_list) { + VkResult res = VK_SUCCESS; char *next, *name; char *layer_env = loader_getenv(env_name, inst); if (layer_env == NULL) { @@ -5365,7 +5643,10 @@ static void loaderAddEnvironmentLayers(struct loader_instance *inst, const enum while (name && *name) { next = loader_get_next_path(name); - loaderAddLayerNameToList(inst, name, type_flags, source_list, target_list, expanded_target_list); + res = loaderAddLayerNameToList(inst, name, type_flags, source_list, target_list, expanded_target_list); + if (res != VK_SUCCESS) { + goto out; + } name = next; } @@ -5375,7 +5656,7 @@ out: loader_free_getenv(layer_env, inst); } - return; + return res; } VkResult loaderEnableInstanceLayers(struct loader_instance *inst, const VkInstanceCreateInfo *pCreateInfo, @@ -5404,8 +5685,11 @@ VkResult loaderEnableInstanceLayers(struct loader_instance *inst, const VkInstan loaderAddImplicitLayers(inst, &inst->app_activated_layer_list, &inst->expanded_activated_layer_list, instance_layers); // Add any layers specified via environment variable next - loaderAddEnvironmentLayers(inst, VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER, "VK_INSTANCE_LAYERS", &inst->app_activated_layer_list, - &inst->expanded_activated_layer_list, instance_layers); + err = loaderAddEnvironmentLayers(inst, VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER, "VK_INSTANCE_LAYERS", &inst->app_activated_layer_list, + &inst->expanded_activated_layer_list, instance_layers); + if (err != VK_SUCCESS) { + goto out; + } // Add layers specified by the application err = loaderAddLayerNamesToList(inst, &inst->app_activated_layer_list, &inst->expanded_activated_layer_list, @@ -5427,6 +5711,7 @@ VkResult loaderEnableInstanceLayers(struct loader_instance *inst, const VkInstan } } +out: return err; } @@ -5749,28 +6034,46 @@ VkResult loader_create_instance_chain(const VkInstanceCreateInfo *pCreateInfo, c } } + VkLoaderFeatureFlags feature_flags = 0; +#if defined(_WIN32) + IDXGIFactory6* dxgi_factory = NULL; + HRESULT hres = fpCreateDXGIFactory1(&IID_IDXGIFactory6, &dxgi_factory); + if (hres == S_OK) { + feature_flags |= VK_LOADER_FEATURE_PHYSICAL_DEVICE_SORTING; + dxgi_factory->lpVtbl->Release(dxgi_factory); + } +#endif + PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)next_gipa(*created_instance, "vkCreateInstance"); if (fpCreateInstance) { - VkLayerInstanceCreateInfo create_info_disp; - - create_info_disp.sType = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO; - create_info_disp.function = VK_LOADER_DATA_CALLBACK; - - create_info_disp.u.pfnSetInstanceLoaderData = vkSetInstanceDispatch; - - create_info_disp.pNext = loader_create_info.pNext; - loader_create_info.pNext = &create_info_disp; - - VkLayerInstanceCreateInfo create_info_disp2; - - create_info_disp2.sType = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO; - create_info_disp2.function = VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK; - - create_info_disp2.u.layerDevice.pfnLayerCreateDevice = loader_layer_create_device; - create_info_disp2.u.layerDevice.pfnLayerDestroyDevice = 
loader_layer_destroy_device; - - create_info_disp2.pNext = loader_create_info.pNext; - loader_create_info.pNext = &create_info_disp2; + const VkLayerInstanceCreateInfo instance_dispatch = { + .sType = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, + .pNext = loader_create_info.pNext, + .function = VK_LOADER_DATA_CALLBACK, + .u = { + .pfnSetInstanceLoaderData = vkSetInstanceDispatch, + }, + }; + const VkLayerInstanceCreateInfo device_callback = { + .sType = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, + .pNext = &instance_dispatch, + .function = VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK, + .u = { + .layerDevice = { + .pfnLayerCreateDevice = loader_layer_create_device, + .pfnLayerDestroyDevice = loader_layer_destroy_device, + }, + }, + }; + const VkLayerInstanceCreateInfo loader_features = { + .sType = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, + .pNext = &device_callback, + .function = VK_LOADER_FEATURES, + .u = { + .loaderFeatures = feature_flags, + }, + }; + loader_create_info.pNext = &loader_features; res = fpCreateInstance(&loader_create_info, pAllocator, created_instance); } else { @@ -6011,8 +6314,11 @@ VkResult loader_validate_instance_extensions(struct loader_instance *inst, const // Build the lists of active layers (including metalayers) and expanded layers (with metalayers resolved to their components) loaderAddImplicitLayers(inst, &active_layers, &expanded_layers, instance_layers); - loaderAddEnvironmentLayers(inst, VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER, ENABLED_LAYERS_ENV, &active_layers, &expanded_layers, - instance_layers); + res = loaderAddEnvironmentLayers(inst, VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER, ENABLED_LAYERS_ENV, &active_layers, &expanded_layers, + instance_layers); + if (res != VK_SUCCESS) { + goto out; + } res = loaderAddLayerNamesToList(inst, &active_layers, &expanded_layers, pCreateInfo->enabledLayerCount, pCreateInfo->ppEnabledLayerNames, instance_layers); if (VK_SUCCESS != res) { @@ -6579,7 +6885,7 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDevice(VkPhysicalDevice physical if (!dev->extensions.khr_device_group_enabled) { VkPhysicalDeviceProperties properties; icd_term->dispatch.GetPhysicalDeviceProperties(phys_dev_term->phys_dev, &properties); - if (properties.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) { + if (properties.apiVersion >= VK_API_VERSION_1_1) { dev->extensions.khr_device_group_enabled = true; } } @@ -6744,11 +7050,143 @@ out: return res; } +struct LoaderSortedPhysicalDevice { + uint32_t device_count; + VkPhysicalDevice* physical_devices; + uint32_t icd_index; + struct loader_icd_term* icd_term; +}; + +// This function allocates an array in sorted_devices which must be freed by the caller if not null +VkResult ReadSortedPhysicalDevices(struct loader_instance *inst, struct LoaderSortedPhysicalDevice **sorted_devices, uint32_t* sorted_count) +{ + VkResult res = VK_SUCCESS; + +#if defined(_WIN32) + uint32_t sorted_alloc = 0; + struct loader_icd_term *icd_term = NULL; + IDXGIFactory6* dxgi_factory = NULL; + HRESULT hres = fpCreateDXGIFactory1(&IID_IDXGIFactory6, &dxgi_factory); + if (hres != S_OK) { + loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "Failed to create DXGI factory 6. 
Physical devices will not be sorted"); + } + else { + sorted_alloc = 16; + *sorted_devices = loader_instance_heap_alloc(inst, sorted_alloc * sizeof(struct LoaderSortedPhysicalDevice), VK_SYSTEM_ALLOCATION_SCOPE_COMMAND); + if (*sorted_devices == NULL) { + res = VK_ERROR_OUT_OF_HOST_MEMORY; + goto out; + } + + memset(*sorted_devices, 0, sorted_alloc * sizeof(struct LoaderSortedPhysicalDevice)); + + *sorted_count = 0; + for (uint32_t i = 0; ; ++i) { + IDXGIAdapter1* adapter; + hres = dxgi_factory->lpVtbl->EnumAdapterByGpuPreference(dxgi_factory, i, DXGI_GPU_PREFERENCE_UNSPECIFIED, &IID_IDXGIAdapter1, &adapter); + if (hres == DXGI_ERROR_NOT_FOUND) { + break; // No more adapters + } + else if (hres != S_OK) { + loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "Failed to enumerate adapters by GPU preference at index %u. This adapter will not be sorted", i); + break; + } + + DXGI_ADAPTER_DESC1 description; + hres = adapter->lpVtbl->GetDesc1(adapter, &description); + if (hres != S_OK) { + loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "Failed to get adapter LUID index %u. This adapter will not be sorted", i); + continue; + } + + if (sorted_alloc <= i) { + uint32_t old_size = sorted_alloc * sizeof(struct LoaderSortedPhysicalDevice); + *sorted_devices = loader_instance_heap_realloc(inst, *sorted_devices, old_size, 2 * old_size, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND); + if (*sorted_devices == NULL) { + adapter->lpVtbl->Release(adapter); + res = VK_ERROR_OUT_OF_HOST_MEMORY; + goto out; + } + sorted_alloc *= 2; + } + struct LoaderSortedPhysicalDevice *sorted_array = *sorted_devices; + sorted_array[*sorted_count].device_count = 0; + sorted_array[*sorted_count].physical_devices = NULL; + //*sorted_count = i; + + icd_term = inst->icd_terms; + for (uint32_t icd_idx = 0; NULL != icd_term; icd_term = icd_term->next, icd_idx++) { + // This is the new behavior, which cannot be run unless the ICD provides EnumerateAdapterPhysicalDevices + if (icd_term->scanned_icd->EnumerateAdapterPhysicalDevices == NULL) { + continue; + } + + uint32_t count; + VkResult vkres = icd_term->scanned_icd->EnumerateAdapterPhysicalDevices(icd_term->instance, description.AdapterLuid, &count, NULL); + if (vkres == VK_ERROR_INCOMPATIBLE_DRIVER) { + continue; // This driver doesn't support the adapter + } else if (vkres == VK_ERROR_OUT_OF_HOST_MEMORY) { + res = VK_ERROR_OUT_OF_HOST_MEMORY; + goto out; + } else if (vkres != VK_SUCCESS) { + loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "Failed to convert DXGI adapter into Vulkan physical device with unexpected error code"); + continue; + } + + // Get the actual physical devices + if (0 != count) + { + do { + sorted_array[*sorted_count].physical_devices = loader_instance_heap_realloc(inst, sorted_array[*sorted_count].physical_devices, sorted_array[*sorted_count].device_count * sizeof(VkPhysicalDevice), count * sizeof(VkPhysicalDevice), VK_SYSTEM_ALLOCATION_SCOPE_COMMAND); + if (sorted_array[*sorted_count].physical_devices == NULL) { + res = VK_ERROR_OUT_OF_HOST_MEMORY; + break; + } + sorted_array[*sorted_count].device_count = count; + } while ((vkres = icd_term->scanned_icd->EnumerateAdapterPhysicalDevices(icd_term->instance, description.AdapterLuid, &count, sorted_array[*sorted_count].physical_devices)) == VK_INCOMPLETE); + } + + if (vkres != VK_SUCCESS) { + loader_instance_heap_free(inst, sorted_array[*sorted_count].physical_devices); + sorted_array[*sorted_count].physical_devices = NULL; + if (vkres == VK_ERROR_OUT_OF_HOST_MEMORY) { + res = VK_ERROR_OUT_OF_HOST_MEMORY; + 
goto out; + } else { + loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, + "Failed to convert DXGI adapter into Vulkan physical device"); + continue; + } + } + inst->total_gpu_count += (sorted_array[*sorted_count].device_count = count); + sorted_array[*sorted_count].icd_index = icd_idx; + sorted_array[*sorted_count].icd_term = icd_term; + ++(*sorted_count); + } + + adapter->lpVtbl->Release(adapter); + } + + dxgi_factory->lpVtbl->Release(dxgi_factory); + } + +out: +#endif + + if (*sorted_count == 0 && *sorted_devices != NULL) { + loader_instance_heap_free(inst, *sorted_devices); + *sorted_devices = NULL; + } + return res; +} + VkResult setupLoaderTermPhysDevs(struct loader_instance *inst) { VkResult res = VK_SUCCESS; struct loader_icd_term *icd_term; struct loader_phys_dev_per_icd *icd_phys_dev_array = NULL; struct loader_physical_device_term **new_phys_devs = NULL; + struct LoaderSortedPhysicalDevice *sorted_phys_dev_array = NULL; + uint32_t sorted_count = 0; inst->total_gpu_count = 0; @@ -6765,11 +7203,29 @@ VkResult setupLoaderTermPhysDevs(struct loader_instance *inst) { goto out; } memset(icd_phys_dev_array, 0, sizeof(struct loader_phys_dev_per_icd) * inst->total_icd_count); - icd_term = inst->icd_terms; + + // Get the physical devices supported by platform sorting mechanism into a separate list + res = ReadSortedPhysicalDevices(inst, &sorted_phys_dev_array, &sorted_count); + if (VK_SUCCESS != res) { + goto out; + } // For each ICD, query the number of physical devices, and then get an // internal value for those physical devices. + icd_term = inst->icd_terms; for (uint32_t icd_idx = 0; NULL != icd_term; icd_term = icd_term->next, icd_idx++) { + icd_phys_dev_array[icd_idx].count = 0; + icd_phys_dev_array[icd_idx].phys_devs = NULL; + icd_phys_dev_array[icd_idx].this_icd_term = NULL; + + // This is the legacy behavior which should be skipped if EnumerateAdapterPhysicalDevices is available + // and we successfully enumerated sorted adapters using ReadSortedPhysicalDevices. +#if defined(VK_USE_PLATFORM_WIN32_KHR) + if (sorted_count && icd_term->scanned_icd->EnumerateAdapterPhysicalDevices != NULL) { + continue; + } +#endif + res = icd_term->dispatch.EnumeratePhysicalDevices(icd_term->instance, &icd_phys_dev_array[icd_idx].count, NULL); if (VK_SUCCESS != res) { loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, @@ -6822,6 +7278,48 @@ VkResult setupLoaderTermPhysDevs(struct loader_instance *inst) { // Copy or create everything to fill the new array of physical devices uint32_t idx = 0; + +#if defined(_WIN32) + // Copy over everything found through sorted enumeration + for (uint32_t i = 0; i < sorted_count; ++i) { + for (uint32_t j = 0; j < sorted_phys_dev_array[i].device_count; ++j) { + // Check if this physical device is already in the old buffer + if (NULL != inst->phys_devs_term) { + for (uint32_t old_idx = 0; old_idx < inst->phys_dev_count_term; old_idx++) { + if (sorted_phys_dev_array[i].physical_devices[j] == inst->phys_devs_term[old_idx]->phys_dev) { + new_phys_devs[idx] = inst->phys_devs_term[old_idx]; + break; + } + } + } + + // If this physical device isn't in the old buffer, then we need to create it. 
+ if (NULL == new_phys_devs[idx]) { + new_phys_devs[idx] = loader_instance_heap_alloc(inst, sizeof(struct loader_physical_device_term), + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (NULL == new_phys_devs[idx]) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "setupLoaderTermPhysDevs: Failed to allocate " + "physical device terminator object %d", + idx); + inst->total_gpu_count = idx; + res = VK_ERROR_OUT_OF_HOST_MEMORY; + goto out; + } + + loader_set_dispatch((void *)new_phys_devs[idx], inst->disp); + new_phys_devs[idx]->this_icd_term = sorted_phys_dev_array[i].icd_term; + new_phys_devs[idx]->icd_index = (uint8_t)(sorted_phys_dev_array[i].icd_index); + new_phys_devs[idx]->phys_dev = sorted_phys_dev_array[i].physical_devices[j]; + } + + // Increment the count of new physical devices + idx++; + } + } +#endif + + // Copy over everything found through EnumeratePhysicalDevices for (uint32_t icd_idx = 0; icd_idx < inst->total_icd_count; icd_idx++) { for (uint32_t pd_idx = 0; pd_idx < icd_phys_dev_array[icd_idx].count; pd_idx++) { // Check if this physical device is already in the old buffer @@ -6893,6 +7391,15 @@ out: inst->phys_devs_term = new_phys_devs; } + if (sorted_phys_dev_array != NULL) { + for (uint32_t i = 0; i < sorted_count; ++i) { + if (sorted_phys_dev_array[i].device_count > 0 && sorted_phys_dev_array[i].physical_devices != NULL) { + loader_instance_heap_free(inst, sorted_phys_dev_array[i].physical_devices); + } + } + loader_instance_heap_free(inst, sorted_phys_dev_array); + } + return res; } @@ -7036,7 +7543,6 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateDeviceExtensionProperties(VkP if (pProperties == NULL) { *pPropertyCount = count; loader_destroy_generic_list(inst, (struct loader_generic_list *)&local_ext_list); - loader_platform_thread_unlock_mutex(&loader_lock); return VK_SUCCESS; } @@ -7048,14 +7554,12 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateDeviceExtensionProperties(VkP loader_destroy_generic_list(inst, (struct loader_generic_list *)&local_ext_list); if (copy_size < count) { - loader_platform_thread_unlock_mutex(&loader_lock); return VK_INCOMPLETE; } } else { loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "vkEnumerateDeviceExtensionProperties: pLayerName " "is too long or is badly formed"); - loader_platform_thread_unlock_mutex(&loader_lock); return VK_ERROR_EXTENSION_NOT_PRESENT; } @@ -7065,10 +7569,26 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateDeviceExtensionProperties(VkP // This case is during the call down the instance chain with pLayerName == NULL struct loader_icd_term *icd_term = phys_dev_term->this_icd_term; uint32_t icd_ext_count = *pPropertyCount; + VkExtensionProperties *icd_props_list = pProperties; VkResult res; - // Get the available device extensions - res = icd_term->dispatch.EnumerateDeviceExtensionProperties(phys_dev_term->phys_dev, NULL, &icd_ext_count, pProperties); + if (NULL == icd_props_list) { + // We need to find the count without duplicates. This requires querying the driver for the names of the extensions. + // A small amount of storage is then needed to facilitate the de-duplication. 
+ res = icd_term->dispatch.EnumerateDeviceExtensionProperties(phys_dev_term->phys_dev, NULL, &icd_ext_count, NULL); + if (res != VK_SUCCESS) { + goto out; + } + icd_props_list = loader_instance_heap_alloc(icd_term->this_instance, sizeof(VkExtensionProperties) * icd_ext_count, + VK_SYSTEM_ALLOCATION_SCOPE_COMMAND); + if (NULL == icd_props_list) { + res = VK_ERROR_OUT_OF_HOST_MEMORY; + goto out; + } + } + + // Get the available device extension count, and if pProperties is not NULL, the extensions as well + res = icd_term->dispatch.EnumerateDeviceExtensionProperties(phys_dev_term->phys_dev, NULL, &icd_ext_count, icd_props_list); if (res != VK_SUCCESS) { goto out; } @@ -7079,36 +7599,36 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateDeviceExtensionProperties(VkP } loaderAddImplicitLayers(icd_term->this_instance, &implicit_layer_list, NULL, &icd_term->this_instance->instance_layer_list); + + // Initialize dev_extension list within the physicalDevice object + res = loader_init_device_extensions(icd_term->this_instance, phys_dev_term, icd_ext_count, icd_props_list, &icd_exts); + if (res != VK_SUCCESS) { + goto out; + } + // We need to determine which implicit layers are active, and then add their extensions. This can't be cached as // it depends on results of environment variables (which can change). - if (pProperties != NULL) { - // Initialize dev_extension list within the physicalDevice object - res = loader_init_device_extensions(icd_term->this_instance, phys_dev_term, icd_ext_count, pProperties, &icd_exts); - if (res != VK_SUCCESS) { - goto out; - } + res = loader_add_to_ext_list(icd_term->this_instance, &all_exts, icd_exts.count, icd_exts.list); + if (res != VK_SUCCESS) { + goto out; + } - // We need to determine which implicit layers are active, and then add their extensions. This can't be cached as - // it depends on results of environment variables (which can change). 
- res = loader_add_to_ext_list(icd_term->this_instance, &all_exts, icd_exts.count, icd_exts.list); - if (res != VK_SUCCESS) { - goto out; - } + loaderAddImplicitLayers(icd_term->this_instance, &implicit_layer_list, NULL, &icd_term->this_instance->instance_layer_list); - loaderAddImplicitLayers(icd_term->this_instance, &implicit_layer_list, NULL, &icd_term->this_instance->instance_layer_list); - - for (uint32_t i = 0; i < implicit_layer_list.count; i++) { - for (uint32_t j = 0; j < implicit_layer_list.list[i].device_extension_list.count; j++) { - res = loader_add_to_ext_list(icd_term->this_instance, &all_exts, 1, - &implicit_layer_list.list[i].device_extension_list.list[j].props); - if (res != VK_SUCCESS) { - goto out; - } + for (uint32_t i = 0; i < implicit_layer_list.count; i++) { + for (uint32_t j = 0; j < implicit_layer_list.list[i].device_extension_list.count; j++) { + res = loader_add_to_ext_list(icd_term->this_instance, &all_exts, 1, + &implicit_layer_list.list[i].device_extension_list.list[j].props); + if (res != VK_SUCCESS) { + goto out; } } - uint32_t capacity = *pPropertyCount; - VkExtensionProperties *props = pProperties; + } + uint32_t capacity = *pPropertyCount; + VkExtensionProperties *props = pProperties; + res = VK_SUCCESS; + if (NULL != pProperties) { for (uint32_t i = 0; i < all_exts.count && i < capacity; i++) { props[i] = all_exts.list[i]; } @@ -7120,14 +7640,7 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateDeviceExtensionProperties(VkP *pPropertyCount = all_exts.count; } } else { - // Just return the count; need to add in the count of implicit layer extensions - // don't worry about duplicates being added in the count - *pPropertyCount = icd_ext_count; - - for (uint32_t i = 0; i < implicit_layer_list.count; i++) { - *pPropertyCount += implicit_layer_list.list[i].device_extension_list.count; - } - res = VK_SUCCESS; + *pPropertyCount = all_exts.count; } out: @@ -7141,7 +7654,9 @@ out: if (NULL != icd_exts.list) { loader_destroy_generic_list(icd_term->this_instance, (struct loader_generic_list *)&icd_exts); } - + if (NULL == pProperties && NULL != icd_props_list) { + loader_instance_heap_free(icd_term->this_instance, icd_props_list); + } return res; } @@ -7201,7 +7716,7 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateInstanceVersion(const VkEnumerateInstanceVersionChain *chain, uint32_t* pApiVersion) { // NOTE: The Vulkan WG doesn't want us checking pApiVersion for NULL, but instead // prefers us crashing. 
- *pApiVersion = VK_MAKE_VERSION(loader_major_version, loader_minor_version, VK_HEADER_VERSION); + *pApiVersion = VK_HEADER_VERSION_COMPLETE; return VK_SUCCESS; } @@ -7238,10 +7753,14 @@ terminator_EnumerateInstanceExtensionProperties(const VkEnumerateInstanceExtensi } } } else { + // Preload ICD libraries so subsequent calls to EnumerateInstanceExtensionProperties don't have to load them + loader_preload_icds(); + // Scan/discover all ICD libraries memset(&icd_tramp_list, 0, sizeof(icd_tramp_list)); res = loader_icd_scan(NULL, &icd_tramp_list); - if (VK_SUCCESS != res) { + // EnumerateInstanceExtensionProperties can't return anything other than OOM or VK_ERROR_LAYER_NOT_PRESENT + if ((VK_SUCCESS != res && icd_tramp_list.count > 0) || res == VK_ERROR_OUT_OF_HOST_MEMORY) { goto out; } // Get extensions from all ICD's, merge so no duplicates @@ -7330,7 +7849,7 @@ out: return result; } -#if defined(_WIN32) && defined(LOADER_DYNAMIC_LIB) +#if defined(_WIN32) BOOL WINAPI DllMain(HINSTANCE hinst, DWORD reason, LPVOID reserved) { switch (reason) { case DLL_PROCESS_ATTACH: @@ -7362,7 +7881,10 @@ VkResult setupLoaderTermPhysDevGroups(struct loader_instance *inst) { uint32_t cur_icd_group_count = 0; VkPhysicalDeviceGroupPropertiesKHR **new_phys_dev_groups = NULL; VkPhysicalDeviceGroupPropertiesKHR *local_phys_dev_groups = NULL; + bool *local_phys_dev_group_sorted = NULL; PFN_vkEnumeratePhysicalDeviceGroups fpEnumeratePhysicalDeviceGroups = NULL; + struct LoaderSortedPhysicalDevice* sorted_phys_dev_array = NULL; + uint32_t sorted_count = 0; if (0 == inst->phys_dev_count_term) { loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, @@ -7426,7 +7948,8 @@ VkResult setupLoaderTermPhysDevGroups(struct loader_instance *inst) { // Create a temporary array (on the stack) to keep track of the // returned VkPhysicalDevice values. local_phys_dev_groups = loader_stack_alloc(sizeof(VkPhysicalDeviceGroupProperties) * total_count); - if (NULL == local_phys_dev_groups) { + local_phys_dev_group_sorted = loader_stack_alloc(sizeof(bool) * total_count); + if (NULL == local_phys_dev_groups || NULL == local_phys_dev_group_sorted) { loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "setupLoaderTermPhysDevGroups: Failed to allocate local " "physical device group array of size %d", @@ -7436,17 +7959,31 @@ VkResult setupLoaderTermPhysDevGroups(struct loader_instance *inst) { } // Initialize the memory to something valid memset(local_phys_dev_groups, 0, sizeof(VkPhysicalDeviceGroupProperties) * total_count); + memset(local_phys_dev_group_sorted, 0, sizeof(bool) * total_count); for (uint32_t group = 0; group < total_count; group++) { local_phys_dev_groups[group].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR; local_phys_dev_groups[group].pNext = NULL; local_phys_dev_groups[group].subsetAllocation = false; } + // Get the physical devices supported by platform sorting mechanism into a separate list + res = ReadSortedPhysicalDevices(inst, &sorted_phys_dev_array, &sorted_count); + if (VK_SUCCESS != res) { + goto out; + } + cur_icd_group_count = 0; icd_term = inst->icd_terms; for (uint32_t icd_idx = 0; NULL != icd_term; icd_term = icd_term->next, icd_idx++) { uint32_t count_this_time = total_count - cur_icd_group_count; + // Check if this group can be sorted +#if defined(VK_USE_PLATFORM_WIN32_KHR) + bool icd_sorted = sorted_count && (icd_term->scanned_icd->EnumerateAdapterPhysicalDevices != NULL); +#else + bool icd_sorted = false; +#endif + // Get the function pointer to use to call into the ICD. 
This could be the core or KHR version if (inst->enabled_known_extensions.khr_device_group_creation) { fpEnumeratePhysicalDeviceGroups = icd_term->dispatch.EnumeratePhysicalDeviceGroupsKHR; @@ -7478,10 +8015,14 @@ VkResult setupLoaderTermPhysDevGroups(struct loader_instance *inst) { for (uint32_t indiv_gpu = 0; indiv_gpu < count_this_time; indiv_gpu++) { local_phys_dev_groups[indiv_gpu + cur_icd_group_count].physicalDeviceCount = 1; local_phys_dev_groups[indiv_gpu + cur_icd_group_count].physicalDevices[0] = phys_dev_array[indiv_gpu]; + local_phys_dev_group_sorted[indiv_gpu + cur_icd_group_count] = icd_sorted; } } else { res = fpEnumeratePhysicalDeviceGroups(icd_term->instance, &count_this_time, &local_phys_dev_groups[cur_icd_group_count]); + for (uint32_t group = 0; group < count_this_time; ++group) { + local_phys_dev_group_sorted[group + cur_icd_group_count] = icd_sorted; + } if (VK_SUCCESS != res) { loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "setupLoaderTermPhysDevGroups: Failed during dispatch call of " @@ -7516,8 +8057,87 @@ VkResult setupLoaderTermPhysDevGroups(struct loader_instance *inst) { } } + uint32_t idx = 0; + +#if defined(_WIN32) + // Copy over everything found through sorted enumeration + for (uint32_t i = 0; i < sorted_count; ++i) { + + // Find the VkPhysicalDeviceGroupProperties object in local_phys_dev_groups + VkPhysicalDeviceGroupProperties *group_properties = NULL; + for (uint32_t group = 0; group < total_count; group++) { + if (sorted_phys_dev_array[i].device_count != local_phys_dev_groups[group].physicalDeviceCount) { + continue; + } + + bool match = true; + for (uint32_t group_gpu = 0; group_gpu < local_phys_dev_groups[group].physicalDeviceCount; group_gpu++) { + if (sorted_phys_dev_array[i].physical_devices[group_gpu] != ((struct loader_physical_device_term*) local_phys_dev_groups[group].physicalDevices[group_gpu])->phys_dev) { + match = false; + break; + } + } + + if (match) { + group_properties = &local_phys_dev_groups[group]; + } + } + + // Check if this physical device group with the same contents is already in the old buffer + for (uint32_t old_idx = 0; old_idx < inst->phys_dev_group_count_term; old_idx++) { + if (NULL != group_properties && group_properties->physicalDeviceCount == inst->phys_dev_groups_term[old_idx]->physicalDeviceCount) { + bool found_all_gpus = true; + for (uint32_t old_gpu = 0; old_gpu < inst->phys_dev_groups_term[old_idx]->physicalDeviceCount; old_gpu++) { + bool found_gpu = false; + for (uint32_t new_gpu = 0; new_gpu < group_properties->physicalDeviceCount; new_gpu++) { + if (group_properties->physicalDevices[new_gpu] == inst->phys_dev_groups_term[old_idx]->physicalDevices[old_gpu]) { + found_gpu = true; + break; + } + } + + if (!found_gpu) { + found_all_gpus = false; + break; + } + } + if (!found_all_gpus) { + continue; + } + else { + new_phys_dev_groups[idx] = inst->phys_dev_groups_term[old_idx]; + break; + } + } + } + + // If this physical device group isn't in the old buffer, create it + if (group_properties != NULL && NULL == new_phys_dev_groups[idx]) { + new_phys_dev_groups[idx] = (VkPhysicalDeviceGroupPropertiesKHR*)loader_instance_heap_alloc( + inst, sizeof(VkPhysicalDeviceGroupPropertiesKHR), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (NULL == new_phys_dev_groups[idx]) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "setupLoaderTermPhysDevGroups: Failed to allocate " + "physical device group Terminator object %d", + idx); + total_count = idx; + res = VK_ERROR_OUT_OF_HOST_MEMORY; + goto out; + } + 
memcpy(new_phys_dev_groups[idx], group_properties, sizeof(VkPhysicalDeviceGroupPropertiesKHR)); + } + + ++idx; + } +#endif + // Copy or create everything to fill the new array of physical device groups for (uint32_t new_idx = 0; new_idx < total_count; new_idx++) { + // Skip groups which have been included through sorting + if (local_phys_dev_group_sorted[new_idx] || local_phys_dev_groups[new_idx].physicalDeviceCount == 0) { + continue; + } + // Check if this physical device group with the same contents is already in the old buffer for (uint32_t old_idx = 0; old_idx < inst->phys_dev_group_count_term; old_idx++) { if (local_phys_dev_groups[new_idx].physicalDeviceCount == inst->phys_dev_groups_term[old_idx]->physicalDeviceCount) { @@ -7539,28 +8159,30 @@ VkResult setupLoaderTermPhysDevGroups(struct loader_instance *inst) { if (!found_all_gpus) { continue; } else { - new_phys_dev_groups[new_idx] = inst->phys_dev_groups_term[old_idx]; + new_phys_dev_groups[idx] = inst->phys_dev_groups_term[old_idx]; break; } } } // If this physical device group isn't in the old buffer, create it - if (NULL == new_phys_dev_groups[new_idx]) { - new_phys_dev_groups[new_idx] = (VkPhysicalDeviceGroupPropertiesKHR *)loader_instance_heap_alloc( + if (NULL == new_phys_dev_groups[idx]) { + new_phys_dev_groups[idx] = (VkPhysicalDeviceGroupPropertiesKHR *)loader_instance_heap_alloc( inst, sizeof(VkPhysicalDeviceGroupPropertiesKHR), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); - if (NULL == new_phys_dev_groups[new_idx]) { + if (NULL == new_phys_dev_groups[idx]) { loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "setupLoaderTermPhysDevGroups: Failed to allocate " "physical device group Terminator object %d", - new_idx); - total_count = new_idx; + idx); + total_count = idx; res = VK_ERROR_OUT_OF_HOST_MEMORY; goto out; } - memcpy(new_phys_dev_groups[new_idx], &local_phys_dev_groups[new_idx], + memcpy(new_phys_dev_groups[idx], &local_phys_dev_groups[new_idx], sizeof(VkPhysicalDeviceGroupPropertiesKHR)); } + + ++idx; } out: @@ -7597,6 +8219,15 @@ out: inst->phys_dev_groups_term = new_phys_dev_groups; } + if (sorted_phys_dev_array != NULL) { + for (uint32_t i = 0; i < sorted_count; ++i) { + if (sorted_phys_dev_array[i].device_count > 0 && sorted_phys_dev_array[i].physical_devices != NULL) { + loader_instance_heap_free(inst, sorted_phys_dev_array[i].physical_devices); + } + } + loader_instance_heap_free(inst, sorted_phys_dev_array); + } + return res; } diff --git a/thirdparty/vulkan/loader/loader.h b/thirdparty/vulkan/loader/loader.h index 56745a968d6..621eb0be3ba 100644 --- a/thirdparty/vulkan/loader/loader.h +++ b/thirdparty/vulkan/loader/loader.h @@ -156,6 +156,8 @@ struct loader_layer_properties { bool keep; uint32_t num_blacklist_layers; char (*blacklist_layer_names)[MAX_STRING_SIZE]; + uint32_t num_app_key_paths; + char (*app_key_paths)[MAX_STRING_SIZE]; }; struct loader_layer_list { @@ -324,6 +326,9 @@ struct loader_instance { #ifdef VK_USE_PLATFORM_XLIB_KHR bool wsi_xlib_surface_enabled; #endif +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + bool wsi_directfb_surface_enabled; +#endif #ifdef VK_USE_PLATFORM_ANDROID_KHR bool wsi_android_surface_enabled; #endif @@ -382,6 +387,9 @@ struct loader_scanned_icd { PFN_GetPhysicalDeviceProcAddr GetPhysicalDeviceProcAddr; PFN_vkCreateInstance CreateInstance; PFN_vkEnumerateInstanceExtensionProperties EnumerateInstanceExtensionProperties; +#if defined(VK_USE_PLATFORM_WIN32_KHR) + PFN_vk_icdEnumerateAdapterPhysicalDevices EnumerateAdapterPhysicalDevices; +#endif }; static inline struct 
loader_instance *loader_instance(VkInstance instance) { return (struct loader_instance *)instance; } @@ -420,11 +428,9 @@ static inline void loader_init_dispatch(void *obj, const void *data) { // Global variables used across files extern struct loader_struct loader; extern THREAD_LOCAL_DECL struct loader_instance *tls_instance; -#if defined(_WIN32) && !defined(LOADER_DYNAMIC_LIB) -extern LOADER_PLATFORM_THREAD_ONCE_DEFINITION(once_init); -#endif extern loader_platform_thread_mutex loader_lock; extern loader_platform_thread_mutex loader_json_lock; +extern loader_platform_thread_mutex loader_preload_icd_lock; struct loader_msg_callback_map_entry { VkDebugReportCallbackEXT icd_obj; @@ -455,6 +461,8 @@ VkResult loader_validate_instance_extensions(struct loader_instance *inst, const const VkInstanceCreateInfo *pCreateInfo); void loader_initialize(void); +void loader_preload_icds(void); +void loader_unload_preloaded_icds(void); bool has_vk_extension_property_array(const VkExtensionProperties *vk_ext_prop, const uint32_t count, const VkExtensionProperties *ext_array); bool has_vk_extension_property(const VkExtensionProperties *vk_ext_prop, const struct loader_extension_list *ext_list); @@ -471,9 +479,9 @@ VkResult loader_init_generic_list(const struct loader_instance *inst, struct loa void loader_destroy_generic_list(const struct loader_instance *inst, struct loader_generic_list *list); void loaderDestroyLayerList(const struct loader_instance *inst, struct loader_device *device, struct loader_layer_list *layer_list); void loaderDeleteLayerListAndProperties(const struct loader_instance *inst, struct loader_layer_list *layer_list); -void loaderAddLayerNameToList(const struct loader_instance *inst, const char *name, const enum layer_type_flags type_flags, - const struct loader_layer_list *source_list, struct loader_layer_list *target_list, - struct loader_layer_list *expanded_target_list); +VkResult loaderAddLayerNameToList(const struct loader_instance *inst, const char *name, const enum layer_type_flags type_flags, + const struct loader_layer_list *source_list, struct loader_layer_list *target_list, + struct loader_layer_list *expanded_target_list); void loader_scanned_icd_clear(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list); VkResult loader_icd_scan(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list); void loaderScanForLayers(struct loader_instance *inst, struct loader_layer_list *instance_layers); diff --git a/thirdparty/vulkan/loader/trampoline.c b/thirdparty/vulkan/loader/trampoline.c index a6a37f0fce8..e0530802a2a 100644 --- a/thirdparty/vulkan/loader/trampoline.c +++ b/thirdparty/vulkan/loader/trampoline.c @@ -127,7 +127,7 @@ LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionPropert } size_t lib_count = 0; - // Prepend layers onto the chain if they implment this entry point + // Prepend layers onto the chain if they implement this entry point for (uint32_t i = 0; i < layers.count; ++i) { if (!loaderImplicitLayerIsEnabled(NULL, layers.list + i) || layers.list[i].pre_instance_functions.enumerate_instance_extension_properties[0] == '\0') { @@ -221,7 +221,7 @@ LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties( } size_t lib_count = 0; - // Prepend layers onto the chain if they implment this entry point + // Prepend layers onto the chain if they implement this entry point for (uint32_t i = 0; i < layers.count; ++i) { if (!loaderImplicitLayerIsEnabled(NULL, layers.list + i) || 
layers.list[i].pre_instance_functions.enumerate_instance_layer_properties[0] == '\0') { @@ -315,7 +315,7 @@ LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(uint32_t } size_t lib_count = 0; - // Prepend layers onto the chain if they implment this entry point + // Prepend layers onto the chain if they implement this entry point for (uint32_t i = 0; i < layers.count; ++i) { if (!loaderImplicitLayerIsEnabled(NULL, layers.list + i) || layers.list[i].pre_instance_functions.enumerate_instance_version[0] == '\0') { @@ -538,7 +538,7 @@ out: if (NULL != ptr_instance) { if (res != VK_SUCCESS) { - if (NULL != ptr_instance->next) { + if (loader.instances == ptr_instance) { loader.instances = ptr_instance->next; } if (NULL != ptr_instance->disp) { @@ -656,9 +656,14 @@ LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, ptr_instance->tmp_report_callbacks); util_FreeDebugReportCreateInfos(pAllocator, ptr_instance->tmp_report_create_infos, ptr_instance->tmp_report_callbacks); } + loader_instance_heap_free(ptr_instance, ptr_instance->disp); loader_instance_heap_free(ptr_instance, ptr_instance); loader_platform_thread_unlock_mutex(&loader_lock); + + // Unload preloaded ICDs, so if vkEnumerateInstanceExtensionProperties or vkCreateInstance is called again, the ICDs are up + // to date + loader_unload_preloaded_icds(); } LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount, @@ -859,7 +864,9 @@ LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(VkDevice device, uint3 disp = loader_get_dispatch(device); disp->GetDeviceQueue(device, queueNodeIndex, queueIndex, pQueue); - loader_set_dispatch(*pQueue, disp); + if (pQueue != NULL) { + loader_set_dispatch(*pQueue, disp); + } } LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, @@ -2445,8 +2452,7 @@ LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool( LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) { const VkLayerDispatchTable *disp = loader_get_dispatch(device); disp->GetDeviceQueue2(device, pQueueInfo, pQueue); - if (*pQueue != VK_NULL_HANDLE) - { + if (pQueue != NULL && *pQueue != NULL) { loader_set_dispatch(*pQueue, disp); } } diff --git a/thirdparty/vulkan/loader/vk_dispatch_table_helper.h b/thirdparty/vulkan/loader/vk_dispatch_table_helper.h index 386745a1e72..e6026aa6980 100644 --- a/thirdparty/vulkan/loader/vk_dispatch_table_helper.h +++ b/thirdparty/vulkan/loader/vk_dispatch_table_helper.h @@ -91,12 +91,33 @@ static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndexedIndirectCountKHR(VkCommandBu static VKAPI_ATTR VkResult VKAPI_CALL StubGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t* pValue) { return VK_SUCCESS; }; static VKAPI_ATTR VkResult VKAPI_CALL StubWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout) { return VK_SUCCESS; }; static VKAPI_ATTR VkResult VKAPI_CALL StubSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo) { return VK_SUCCESS; }; -static VKAPI_ATTR void VKAPI_CALL StubGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfo* pInfo) { }; -static VKAPI_ATTR void VKAPI_CALL StubGetBufferOpaqueCaptureAddressKHR(VkDevice device, const VkBufferDeviceAddressInfo* pInfo) { }; -static VKAPI_ATTR void VKAPI_CALL
StubGetDeviceMemoryOpaqueCaptureAddressKHR(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo) { }; +static VKAPI_ATTR VkDeviceAddress VKAPI_CALL StubGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfo* pInfo) { return 0L; }; +static VKAPI_ATTR uint64_t VKAPI_CALL StubGetBufferOpaqueCaptureAddressKHR(VkDevice device, const VkBufferDeviceAddressInfo* pInfo) { return 0L; }; +static VKAPI_ATTR uint64_t VKAPI_CALL StubGetDeviceMemoryOpaqueCaptureAddressKHR(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo) { return 0L; }; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkResult VKAPI_CALL StubCreateDeferredOperationKHR(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation) { return VK_SUCCESS; }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR void VKAPI_CALL StubDestroyDeferredOperationKHR(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator) { }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR uint32_t VKAPI_CALL StubGetDeferredOperationMaxConcurrencyKHR(VkDevice device, VkDeferredOperationKHR operation) { return 0; }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkResult VKAPI_CALL StubGetDeferredOperationResultKHR(VkDevice device, VkDeferredOperationKHR operation) { return VK_SUCCESS; }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkResult VKAPI_CALL StubDeferredOperationJoinKHR(VkDevice device, VkDeferredOperationKHR operation) { return VK_SUCCESS; }; +#endif // VK_ENABLE_BETA_EXTENSIONS static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutablePropertiesKHR(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties) { return VK_SUCCESS; }; static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutableStatisticsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics) { return VK_SUCCESS; }; static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutableInternalRepresentationsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) { return VK_SUCCESS; }; +static VKAPI_ATTR void VKAPI_CALL StubCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo) { }; static VKAPI_ATTR VkResult VKAPI_CALL StubDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo) { return VK_SUCCESS; }; static 
VKAPI_ATTR VkResult VKAPI_CALL StubDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo) { return VK_SUCCESS; }; static VKAPI_ATTR void VKAPI_CALL StubCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) { }; @@ -108,7 +129,8 @@ static VKAPI_ATTR void VKAPI_CALL StubCmdEndTransformFeedbackEXT(VkCommandBuffer static VKAPI_ATTR void VKAPI_CALL StubCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) { }; static VKAPI_ATTR void VKAPI_CALL StubCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) { }; static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) { }; -static VKAPI_ATTR void VKAPI_CALL StubGetImageViewHandleNVX(VkDevice device, const VkImageViewHandleInfoNVX* pInfo) { }; +static VKAPI_ATTR uint32_t VKAPI_CALL StubGetImageViewHandleNVX(VkDevice device, const VkImageViewHandleInfoNVX* pInfo) { return 0; }; +static VKAPI_ATTR VkResult VKAPI_CALL StubGetImageViewAddressNVX(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties) { return VK_SUCCESS; }; static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { }; static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { }; static VKAPI_ATTR VkResult VKAPI_CALL StubGetShaderInfoAMD(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo) { return VK_SUCCESS; }; @@ -117,14 +139,6 @@ static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryWin32HandleNV(VkDevice device #endif // VK_USE_PLATFORM_WIN32_KHR static VKAPI_ATTR void VKAPI_CALL StubCmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) { }; static VKAPI_ATTR void VKAPI_CALL StubCmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) { }; -static VKAPI_ATTR void VKAPI_CALL StubCmdProcessCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) { }; -static VKAPI_ATTR void VKAPI_CALL StubCmdReserveSpaceForCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) { }; -static VKAPI_ATTR VkResult VKAPI_CALL StubCreateIndirectCommandsLayoutNVX(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) { return VK_SUCCESS; }; -static VKAPI_ATTR void VKAPI_CALL StubDestroyIndirectCommandsLayoutNVX(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) { }; -static VKAPI_ATTR VkResult VKAPI_CALL StubCreateObjectTableNVX(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable) { return VK_SUCCESS; }; -static VKAPI_ATTR void 
VKAPI_CALL StubDestroyObjectTableNVX(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator) { }; -static VKAPI_ATTR VkResult VKAPI_CALL StubRegisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices) { return VK_SUCCESS; }; -static VKAPI_ATTR VkResult VKAPI_CALL StubUnregisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices) { return VK_SUCCESS; }; static VKAPI_ATTR void VKAPI_CALL StubCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) { }; static VKAPI_ATTR VkResult VKAPI_CALL StubDisplayPowerControlEXT(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo) { return VK_SUCCESS; }; static VKAPI_ATTR VkResult VKAPI_CALL StubRegisterDeviceEventEXT(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) { return VK_SUCCESS; }; @@ -150,16 +164,20 @@ static VKAPI_ATTR void VKAPI_CALL StubCmdBindShadingRateImageNV(VkCommandBuffer static VKAPI_ATTR void VKAPI_CALL StubCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) { }; static VKAPI_ATTR void VKAPI_CALL StubCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) { }; static VKAPI_ATTR VkResult VKAPI_CALL StubCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure) { return VK_SUCCESS; }; -static VKAPI_ATTR void VKAPI_CALL StubDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator) { }; +static VKAPI_ATTR void VKAPI_CALL StubDestroyAccelerationStructureKHR(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator) { }; +static VKAPI_ATTR void VKAPI_CALL StubDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator) { }; static VKAPI_ATTR void VKAPI_CALL StubGetAccelerationStructureMemoryRequirementsNV(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) { }; -static VKAPI_ATTR VkResult VKAPI_CALL StubBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) { return VK_SUCCESS; }; -static VKAPI_ATTR void VKAPI_CALL StubCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) { }; -static VKAPI_ATTR void VKAPI_CALL StubCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) { }; +static VKAPI_ATTR VkResult VKAPI_CALL 
StubBindAccelerationStructureMemoryKHR(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos) { return VK_SUCCESS; }; +static VKAPI_ATTR VkResult VKAPI_CALL StubBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos) { return VK_SUCCESS; }; +static VKAPI_ATTR void VKAPI_CALL StubCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkCopyAccelerationStructureModeKHR mode) { }; static VKAPI_ATTR void VKAPI_CALL StubCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) { }; static VKAPI_ATTR VkResult VKAPI_CALL StubCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) { return VK_SUCCESS; }; +static VKAPI_ATTR VkResult VKAPI_CALL StubGetRayTracingShaderGroupHandlesKHR(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) { return VK_SUCCESS; }; static VKAPI_ATTR VkResult VKAPI_CALL StubGetRayTracingShaderGroupHandlesNV(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) { return VK_SUCCESS; }; -static VKAPI_ATTR VkResult VKAPI_CALL StubGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData) { return VK_SUCCESS; }; -static VKAPI_ATTR void VKAPI_CALL StubCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) { }; +static VKAPI_ATTR VkResult VKAPI_CALL StubGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData) { return VK_SUCCESS; }; +static VKAPI_ATTR void VKAPI_CALL StubCmdWriteAccelerationStructuresPropertiesKHR(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) { }; static VKAPI_ATTR VkResult VKAPI_CALL StubCompileDeferredNV(VkDevice device, VkPipeline pipeline, uint32_t shader) { return 
VK_SUCCESS; }; static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) { return VK_SUCCESS; }; static VKAPI_ATTR void VKAPI_CALL StubCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) { }; @@ -180,7 +198,7 @@ static VKAPI_ATTR VkResult VKAPI_CALL StubReleasePerformanceConfigurationINTEL(V static VKAPI_ATTR VkResult VKAPI_CALL StubQueueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration) { return VK_SUCCESS; }; static VKAPI_ATTR VkResult VKAPI_CALL StubGetPerformanceParameterINTEL(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue) { return VK_SUCCESS; }; static VKAPI_ATTR void VKAPI_CALL StubSetLocalDimmingAMD(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) { }; -static VKAPI_ATTR void VKAPI_CALL StubGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfo* pInfo) { }; +static VKAPI_ATTR VkDeviceAddress VKAPI_CALL StubGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfo* pInfo) { return 0L; }; #ifdef VK_USE_PLATFORM_WIN32_KHR static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) { return VK_SUCCESS; }; #endif // VK_USE_PLATFORM_WIN32_KHR @@ -192,6 +210,82 @@ static VKAPI_ATTR VkResult VKAPI_CALL StubGetDeviceGroupSurfacePresentModes2EXT( #endif // VK_USE_PLATFORM_WIN32_KHR static VKAPI_ATTR void VKAPI_CALL StubCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) { }; static VKAPI_ATTR void VKAPI_CALL StubResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 
stencilTestEnable) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp) { }; +static VKAPI_ATTR void VKAPI_CALL StubGetGeneratedCommandsMemoryRequirementsNV(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdPreprocessGeneratedCommandsNV(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdExecuteGeneratedCommandsNV(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo) { }; +static VKAPI_ATTR void VKAPI_CALL StubCmdBindPipelineShaderGroupNV(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex) { }; +static VKAPI_ATTR VkResult VKAPI_CALL StubCreateIndirectCommandsLayoutNV(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout) { return VK_SUCCESS; }; +static VKAPI_ATTR void VKAPI_CALL StubDestroyIndirectCommandsLayoutNV(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) { }; +static VKAPI_ATTR VkResult VKAPI_CALL StubCreatePrivateDataSlotEXT(VkDevice device, const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot) { return VK_SUCCESS; }; +static VKAPI_ATTR void VKAPI_CALL StubDestroyPrivateDataSlotEXT(VkDevice device, VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator) { }; +static VKAPI_ATTR VkResult VKAPI_CALL StubSetPrivateDataEXT(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data) { return VK_SUCCESS; }; +static VKAPI_ATTR void VKAPI_CALL StubGetPrivateDataEXT(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData) { }; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkResult VKAPI_CALL StubCreateAccelerationStructureKHR(VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure) { return VK_SUCCESS; }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR void VKAPI_CALL StubGetAccelerationStructureMemoryRequirementsKHR(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, VkMemoryRequirements2* pMemoryRequirements) { }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR void VKAPI_CALL StubCmdBuildAccelerationStructureKHR(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos) { }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR void VKAPI_CALL StubCmdBuildAccelerationStructureIndirectKHR(VkCommandBuffer commandBuffer, const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, VkBuffer indirectBuffer, VkDeviceSize indirectOffset, uint32_t indirectStride) { }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkResult 
VKAPI_CALL StubBuildAccelerationStructureKHR(VkDevice device, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos) { return VK_SUCCESS; }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkResult VKAPI_CALL StubCopyAccelerationStructureKHR(VkDevice device, const VkCopyAccelerationStructureInfoKHR* pInfo) { return VK_SUCCESS; }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkResult VKAPI_CALL StubCopyAccelerationStructureToMemoryKHR(VkDevice device, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo) { return VK_SUCCESS; }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkResult VKAPI_CALL StubCopyMemoryToAccelerationStructureKHR(VkDevice device, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo) { return VK_SUCCESS; }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkResult VKAPI_CALL StubWriteAccelerationStructuresPropertiesKHR(VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride) { return VK_SUCCESS; }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR void VKAPI_CALL StubCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo) { }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR void VKAPI_CALL StubCmdCopyAccelerationStructureToMemoryKHR(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo) { }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR void VKAPI_CALL StubCmdCopyMemoryToAccelerationStructureKHR(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo) { }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR void VKAPI_CALL StubCmdTraceRaysKHR(VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth) { }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkResult VKAPI_CALL StubCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) { return VK_SUCCESS; }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkDeviceAddress VKAPI_CALL StubGetAccelerationStructureDeviceAddressKHR(VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo) { return 0L; }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkResult VKAPI_CALL StubGetRayTracingCaptureReplayShaderGroupHandlesKHR(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) { return VK_SUCCESS; }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR void VKAPI_CALL StubCmdTraceRaysIndirectKHR(VkCommandBuffer 
commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, VkBuffer buffer, VkDeviceSize offset) { }; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +static VKAPI_ATTR VkResult VKAPI_CALL StubGetDeviceAccelerationStructureCompatibilityKHR(VkDevice device, const VkAccelerationStructureVersionKHR* version) { return VK_SUCCESS; }; +#endif // VK_ENABLE_BETA_EXTENSIONS @@ -466,12 +560,44 @@ static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDisp if (table->GetBufferOpaqueCaptureAddressKHR == nullptr) { table->GetBufferOpaqueCaptureAddressKHR = (PFN_vkGetBufferOpaqueCaptureAddressKHR)StubGetBufferOpaqueCaptureAddressKHR; } table->GetDeviceMemoryOpaqueCaptureAddressKHR = (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR) gpa(device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR"); if (table->GetDeviceMemoryOpaqueCaptureAddressKHR == nullptr) { table->GetDeviceMemoryOpaqueCaptureAddressKHR = (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)StubGetDeviceMemoryOpaqueCaptureAddressKHR; } +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CreateDeferredOperationKHR = (PFN_vkCreateDeferredOperationKHR) gpa(device, "vkCreateDeferredOperationKHR"); + if (table->CreateDeferredOperationKHR == nullptr) { table->CreateDeferredOperationKHR = (PFN_vkCreateDeferredOperationKHR)StubCreateDeferredOperationKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->DestroyDeferredOperationKHR = (PFN_vkDestroyDeferredOperationKHR) gpa(device, "vkDestroyDeferredOperationKHR"); + if (table->DestroyDeferredOperationKHR == nullptr) { table->DestroyDeferredOperationKHR = (PFN_vkDestroyDeferredOperationKHR)StubDestroyDeferredOperationKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->GetDeferredOperationMaxConcurrencyKHR = (PFN_vkGetDeferredOperationMaxConcurrencyKHR) gpa(device, "vkGetDeferredOperationMaxConcurrencyKHR"); + if (table->GetDeferredOperationMaxConcurrencyKHR == nullptr) { table->GetDeferredOperationMaxConcurrencyKHR = (PFN_vkGetDeferredOperationMaxConcurrencyKHR)StubGetDeferredOperationMaxConcurrencyKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->GetDeferredOperationResultKHR = (PFN_vkGetDeferredOperationResultKHR) gpa(device, "vkGetDeferredOperationResultKHR"); + if (table->GetDeferredOperationResultKHR == nullptr) { table->GetDeferredOperationResultKHR = (PFN_vkGetDeferredOperationResultKHR)StubGetDeferredOperationResultKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->DeferredOperationJoinKHR = (PFN_vkDeferredOperationJoinKHR) gpa(device, "vkDeferredOperationJoinKHR"); + if (table->DeferredOperationJoinKHR == nullptr) { table->DeferredOperationJoinKHR = (PFN_vkDeferredOperationJoinKHR)StubDeferredOperationJoinKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR) gpa(device, "vkGetPipelineExecutablePropertiesKHR"); if (table->GetPipelineExecutablePropertiesKHR == nullptr) { table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR)StubGetPipelineExecutablePropertiesKHR; } table->GetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR) gpa(device, "vkGetPipelineExecutableStatisticsKHR"); if 
(table->GetPipelineExecutableStatisticsKHR == nullptr) { table->GetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR)StubGetPipelineExecutableStatisticsKHR; } table->GetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR) gpa(device, "vkGetPipelineExecutableInternalRepresentationsKHR"); if (table->GetPipelineExecutableInternalRepresentationsKHR == nullptr) { table->GetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR)StubGetPipelineExecutableInternalRepresentationsKHR; } + table->CmdCopyBuffer2KHR = (PFN_vkCmdCopyBuffer2KHR) gpa(device, "vkCmdCopyBuffer2KHR"); + if (table->CmdCopyBuffer2KHR == nullptr) { table->CmdCopyBuffer2KHR = (PFN_vkCmdCopyBuffer2KHR)StubCmdCopyBuffer2KHR; } + table->CmdCopyImage2KHR = (PFN_vkCmdCopyImage2KHR) gpa(device, "vkCmdCopyImage2KHR"); + if (table->CmdCopyImage2KHR == nullptr) { table->CmdCopyImage2KHR = (PFN_vkCmdCopyImage2KHR)StubCmdCopyImage2KHR; } + table->CmdCopyBufferToImage2KHR = (PFN_vkCmdCopyBufferToImage2KHR) gpa(device, "vkCmdCopyBufferToImage2KHR"); + if (table->CmdCopyBufferToImage2KHR == nullptr) { table->CmdCopyBufferToImage2KHR = (PFN_vkCmdCopyBufferToImage2KHR)StubCmdCopyBufferToImage2KHR; } + table->CmdCopyImageToBuffer2KHR = (PFN_vkCmdCopyImageToBuffer2KHR) gpa(device, "vkCmdCopyImageToBuffer2KHR"); + if (table->CmdCopyImageToBuffer2KHR == nullptr) { table->CmdCopyImageToBuffer2KHR = (PFN_vkCmdCopyImageToBuffer2KHR)StubCmdCopyImageToBuffer2KHR; } + table->CmdBlitImage2KHR = (PFN_vkCmdBlitImage2KHR) gpa(device, "vkCmdBlitImage2KHR"); + if (table->CmdBlitImage2KHR == nullptr) { table->CmdBlitImage2KHR = (PFN_vkCmdBlitImage2KHR)StubCmdBlitImage2KHR; } + table->CmdResolveImage2KHR = (PFN_vkCmdResolveImage2KHR) gpa(device, "vkCmdResolveImage2KHR"); + if (table->CmdResolveImage2KHR == nullptr) { table->CmdResolveImage2KHR = (PFN_vkCmdResolveImage2KHR)StubCmdResolveImage2KHR; } table->DebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT) gpa(device, "vkDebugMarkerSetObjectTagEXT"); if (table->DebugMarkerSetObjectTagEXT == nullptr) { table->DebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)StubDebugMarkerSetObjectTagEXT; } table->DebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT) gpa(device, "vkDebugMarkerSetObjectNameEXT"); @@ -496,6 +622,8 @@ static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDisp if (table->CmdDrawIndirectByteCountEXT == nullptr) { table->CmdDrawIndirectByteCountEXT = (PFN_vkCmdDrawIndirectByteCountEXT)StubCmdDrawIndirectByteCountEXT; } table->GetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX) gpa(device, "vkGetImageViewHandleNVX"); if (table->GetImageViewHandleNVX == nullptr) { table->GetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX)StubGetImageViewHandleNVX; } + table->GetImageViewAddressNVX = (PFN_vkGetImageViewAddressNVX) gpa(device, "vkGetImageViewAddressNVX"); + if (table->GetImageViewAddressNVX == nullptr) { table->GetImageViewAddressNVX = (PFN_vkGetImageViewAddressNVX)StubGetImageViewAddressNVX; } table->CmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD) gpa(device, "vkCmdDrawIndirectCountAMD"); if (table->CmdDrawIndirectCountAMD == nullptr) { table->CmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD)StubCmdDrawIndirectCountAMD; } table->CmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD) gpa(device, "vkCmdDrawIndexedIndirectCountAMD"); @@ -510,22 +638,6 @@ static inline void 
layer_init_device_dispatch_table(VkDevice device, VkLayerDisp if (table->CmdBeginConditionalRenderingEXT == nullptr) { table->CmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT)StubCmdBeginConditionalRenderingEXT; } table->CmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT) gpa(device, "vkCmdEndConditionalRenderingEXT"); if (table->CmdEndConditionalRenderingEXT == nullptr) { table->CmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT)StubCmdEndConditionalRenderingEXT; } - table->CmdProcessCommandsNVX = (PFN_vkCmdProcessCommandsNVX) gpa(device, "vkCmdProcessCommandsNVX"); - if (table->CmdProcessCommandsNVX == nullptr) { table->CmdProcessCommandsNVX = (PFN_vkCmdProcessCommandsNVX)StubCmdProcessCommandsNVX; } - table->CmdReserveSpaceForCommandsNVX = (PFN_vkCmdReserveSpaceForCommandsNVX) gpa(device, "vkCmdReserveSpaceForCommandsNVX"); - if (table->CmdReserveSpaceForCommandsNVX == nullptr) { table->CmdReserveSpaceForCommandsNVX = (PFN_vkCmdReserveSpaceForCommandsNVX)StubCmdReserveSpaceForCommandsNVX; } - table->CreateIndirectCommandsLayoutNVX = (PFN_vkCreateIndirectCommandsLayoutNVX) gpa(device, "vkCreateIndirectCommandsLayoutNVX"); - if (table->CreateIndirectCommandsLayoutNVX == nullptr) { table->CreateIndirectCommandsLayoutNVX = (PFN_vkCreateIndirectCommandsLayoutNVX)StubCreateIndirectCommandsLayoutNVX; } - table->DestroyIndirectCommandsLayoutNVX = (PFN_vkDestroyIndirectCommandsLayoutNVX) gpa(device, "vkDestroyIndirectCommandsLayoutNVX"); - if (table->DestroyIndirectCommandsLayoutNVX == nullptr) { table->DestroyIndirectCommandsLayoutNVX = (PFN_vkDestroyIndirectCommandsLayoutNVX)StubDestroyIndirectCommandsLayoutNVX; } - table->CreateObjectTableNVX = (PFN_vkCreateObjectTableNVX) gpa(device, "vkCreateObjectTableNVX"); - if (table->CreateObjectTableNVX == nullptr) { table->CreateObjectTableNVX = (PFN_vkCreateObjectTableNVX)StubCreateObjectTableNVX; } - table->DestroyObjectTableNVX = (PFN_vkDestroyObjectTableNVX) gpa(device, "vkDestroyObjectTableNVX"); - if (table->DestroyObjectTableNVX == nullptr) { table->DestroyObjectTableNVX = (PFN_vkDestroyObjectTableNVX)StubDestroyObjectTableNVX; } - table->RegisterObjectsNVX = (PFN_vkRegisterObjectsNVX) gpa(device, "vkRegisterObjectsNVX"); - if (table->RegisterObjectsNVX == nullptr) { table->RegisterObjectsNVX = (PFN_vkRegisterObjectsNVX)StubRegisterObjectsNVX; } - table->UnregisterObjectsNVX = (PFN_vkUnregisterObjectsNVX) gpa(device, "vkUnregisterObjectsNVX"); - if (table->UnregisterObjectsNVX == nullptr) { table->UnregisterObjectsNVX = (PFN_vkUnregisterObjectsNVX)StubUnregisterObjectsNVX; } table->CmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV) gpa(device, "vkCmdSetViewportWScalingNV"); if (table->CmdSetViewportWScalingNV == nullptr) { table->CmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV)StubCmdSetViewportWScalingNV; } table->DisplayPowerControlEXT = (PFN_vkDisplayPowerControlEXT) gpa(device, "vkDisplayPowerControlEXT"); @@ -580,10 +692,14 @@ static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDisp if (table->CmdSetCoarseSampleOrderNV == nullptr) { table->CmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV)StubCmdSetCoarseSampleOrderNV; } table->CreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV) gpa(device, "vkCreateAccelerationStructureNV"); if (table->CreateAccelerationStructureNV == nullptr) { table->CreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV)StubCreateAccelerationStructureNV; } + 
table->DestroyAccelerationStructureKHR = (PFN_vkDestroyAccelerationStructureKHR) gpa(device, "vkDestroyAccelerationStructureKHR"); + if (table->DestroyAccelerationStructureKHR == nullptr) { table->DestroyAccelerationStructureKHR = (PFN_vkDestroyAccelerationStructureKHR)StubDestroyAccelerationStructureKHR; } table->DestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV) gpa(device, "vkDestroyAccelerationStructureNV"); if (table->DestroyAccelerationStructureNV == nullptr) { table->DestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV)StubDestroyAccelerationStructureNV; } table->GetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV) gpa(device, "vkGetAccelerationStructureMemoryRequirementsNV"); if (table->GetAccelerationStructureMemoryRequirementsNV == nullptr) { table->GetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV)StubGetAccelerationStructureMemoryRequirementsNV; } + table->BindAccelerationStructureMemoryKHR = (PFN_vkBindAccelerationStructureMemoryKHR) gpa(device, "vkBindAccelerationStructureMemoryKHR"); + if (table->BindAccelerationStructureMemoryKHR == nullptr) { table->BindAccelerationStructureMemoryKHR = (PFN_vkBindAccelerationStructureMemoryKHR)StubBindAccelerationStructureMemoryKHR; } table->BindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV) gpa(device, "vkBindAccelerationStructureMemoryNV"); if (table->BindAccelerationStructureMemoryNV == nullptr) { table->BindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV)StubBindAccelerationStructureMemoryNV; } table->CmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV) gpa(device, "vkCmdBuildAccelerationStructureNV"); @@ -594,10 +710,14 @@ static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDisp if (table->CmdTraceRaysNV == nullptr) { table->CmdTraceRaysNV = (PFN_vkCmdTraceRaysNV)StubCmdTraceRaysNV; } table->CreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV) gpa(device, "vkCreateRayTracingPipelinesNV"); if (table->CreateRayTracingPipelinesNV == nullptr) { table->CreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV)StubCreateRayTracingPipelinesNV; } + table->GetRayTracingShaderGroupHandlesKHR = (PFN_vkGetRayTracingShaderGroupHandlesKHR) gpa(device, "vkGetRayTracingShaderGroupHandlesKHR"); + if (table->GetRayTracingShaderGroupHandlesKHR == nullptr) { table->GetRayTracingShaderGroupHandlesKHR = (PFN_vkGetRayTracingShaderGroupHandlesKHR)StubGetRayTracingShaderGroupHandlesKHR; } table->GetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV) gpa(device, "vkGetRayTracingShaderGroupHandlesNV"); if (table->GetRayTracingShaderGroupHandlesNV == nullptr) { table->GetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV)StubGetRayTracingShaderGroupHandlesNV; } table->GetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV) gpa(device, "vkGetAccelerationStructureHandleNV"); if (table->GetAccelerationStructureHandleNV == nullptr) { table->GetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV)StubGetAccelerationStructureHandleNV; } + table->CmdWriteAccelerationStructuresPropertiesKHR = (PFN_vkCmdWriteAccelerationStructuresPropertiesKHR) gpa(device, "vkCmdWriteAccelerationStructuresPropertiesKHR"); + if (table->CmdWriteAccelerationStructuresPropertiesKHR == nullptr) { table->CmdWriteAccelerationStructuresPropertiesKHR = 
(PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)StubCmdWriteAccelerationStructuresPropertiesKHR; } table->CmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV) gpa(device, "vkCmdWriteAccelerationStructuresPropertiesNV"); if (table->CmdWriteAccelerationStructuresPropertiesNV == nullptr) { table->CmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV)StubCmdWriteAccelerationStructuresPropertiesNV; } table->CompileDeferredNV = (PFN_vkCompileDeferredNV) gpa(device, "vkCompileDeferredNV"); @@ -658,6 +778,122 @@ static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDisp if (table->CmdSetLineStippleEXT == nullptr) { table->CmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT)StubCmdSetLineStippleEXT; } table->ResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT) gpa(device, "vkResetQueryPoolEXT"); if (table->ResetQueryPoolEXT == nullptr) { table->ResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)StubResetQueryPoolEXT; } + table->CmdSetCullModeEXT = (PFN_vkCmdSetCullModeEXT) gpa(device, "vkCmdSetCullModeEXT"); + if (table->CmdSetCullModeEXT == nullptr) { table->CmdSetCullModeEXT = (PFN_vkCmdSetCullModeEXT)StubCmdSetCullModeEXT; } + table->CmdSetFrontFaceEXT = (PFN_vkCmdSetFrontFaceEXT) gpa(device, "vkCmdSetFrontFaceEXT"); + if (table->CmdSetFrontFaceEXT == nullptr) { table->CmdSetFrontFaceEXT = (PFN_vkCmdSetFrontFaceEXT)StubCmdSetFrontFaceEXT; } + table->CmdSetPrimitiveTopologyEXT = (PFN_vkCmdSetPrimitiveTopologyEXT) gpa(device, "vkCmdSetPrimitiveTopologyEXT"); + if (table->CmdSetPrimitiveTopologyEXT == nullptr) { table->CmdSetPrimitiveTopologyEXT = (PFN_vkCmdSetPrimitiveTopologyEXT)StubCmdSetPrimitiveTopologyEXT; } + table->CmdSetViewportWithCountEXT = (PFN_vkCmdSetViewportWithCountEXT) gpa(device, "vkCmdSetViewportWithCountEXT"); + if (table->CmdSetViewportWithCountEXT == nullptr) { table->CmdSetViewportWithCountEXT = (PFN_vkCmdSetViewportWithCountEXT)StubCmdSetViewportWithCountEXT; } + table->CmdSetScissorWithCountEXT = (PFN_vkCmdSetScissorWithCountEXT) gpa(device, "vkCmdSetScissorWithCountEXT"); + if (table->CmdSetScissorWithCountEXT == nullptr) { table->CmdSetScissorWithCountEXT = (PFN_vkCmdSetScissorWithCountEXT)StubCmdSetScissorWithCountEXT; } + table->CmdBindVertexBuffers2EXT = (PFN_vkCmdBindVertexBuffers2EXT) gpa(device, "vkCmdBindVertexBuffers2EXT"); + if (table->CmdBindVertexBuffers2EXT == nullptr) { table->CmdBindVertexBuffers2EXT = (PFN_vkCmdBindVertexBuffers2EXT)StubCmdBindVertexBuffers2EXT; } + table->CmdSetDepthTestEnableEXT = (PFN_vkCmdSetDepthTestEnableEXT) gpa(device, "vkCmdSetDepthTestEnableEXT"); + if (table->CmdSetDepthTestEnableEXT == nullptr) { table->CmdSetDepthTestEnableEXT = (PFN_vkCmdSetDepthTestEnableEXT)StubCmdSetDepthTestEnableEXT; } + table->CmdSetDepthWriteEnableEXT = (PFN_vkCmdSetDepthWriteEnableEXT) gpa(device, "vkCmdSetDepthWriteEnableEXT"); + if (table->CmdSetDepthWriteEnableEXT == nullptr) { table->CmdSetDepthWriteEnableEXT = (PFN_vkCmdSetDepthWriteEnableEXT)StubCmdSetDepthWriteEnableEXT; } + table->CmdSetDepthCompareOpEXT = (PFN_vkCmdSetDepthCompareOpEXT) gpa(device, "vkCmdSetDepthCompareOpEXT"); + if (table->CmdSetDepthCompareOpEXT == nullptr) { table->CmdSetDepthCompareOpEXT = (PFN_vkCmdSetDepthCompareOpEXT)StubCmdSetDepthCompareOpEXT; } + table->CmdSetDepthBoundsTestEnableEXT = (PFN_vkCmdSetDepthBoundsTestEnableEXT) gpa(device, "vkCmdSetDepthBoundsTestEnableEXT"); + if (table->CmdSetDepthBoundsTestEnableEXT == nullptr) { table->CmdSetDepthBoundsTestEnableEXT = 
(PFN_vkCmdSetDepthBoundsTestEnableEXT)StubCmdSetDepthBoundsTestEnableEXT; } + table->CmdSetStencilTestEnableEXT = (PFN_vkCmdSetStencilTestEnableEXT) gpa(device, "vkCmdSetStencilTestEnableEXT"); + if (table->CmdSetStencilTestEnableEXT == nullptr) { table->CmdSetStencilTestEnableEXT = (PFN_vkCmdSetStencilTestEnableEXT)StubCmdSetStencilTestEnableEXT; } + table->CmdSetStencilOpEXT = (PFN_vkCmdSetStencilOpEXT) gpa(device, "vkCmdSetStencilOpEXT"); + if (table->CmdSetStencilOpEXT == nullptr) { table->CmdSetStencilOpEXT = (PFN_vkCmdSetStencilOpEXT)StubCmdSetStencilOpEXT; } + table->GetGeneratedCommandsMemoryRequirementsNV = (PFN_vkGetGeneratedCommandsMemoryRequirementsNV) gpa(device, "vkGetGeneratedCommandsMemoryRequirementsNV"); + if (table->GetGeneratedCommandsMemoryRequirementsNV == nullptr) { table->GetGeneratedCommandsMemoryRequirementsNV = (PFN_vkGetGeneratedCommandsMemoryRequirementsNV)StubGetGeneratedCommandsMemoryRequirementsNV; } + table->CmdPreprocessGeneratedCommandsNV = (PFN_vkCmdPreprocessGeneratedCommandsNV) gpa(device, "vkCmdPreprocessGeneratedCommandsNV"); + if (table->CmdPreprocessGeneratedCommandsNV == nullptr) { table->CmdPreprocessGeneratedCommandsNV = (PFN_vkCmdPreprocessGeneratedCommandsNV)StubCmdPreprocessGeneratedCommandsNV; } + table->CmdExecuteGeneratedCommandsNV = (PFN_vkCmdExecuteGeneratedCommandsNV) gpa(device, "vkCmdExecuteGeneratedCommandsNV"); + if (table->CmdExecuteGeneratedCommandsNV == nullptr) { table->CmdExecuteGeneratedCommandsNV = (PFN_vkCmdExecuteGeneratedCommandsNV)StubCmdExecuteGeneratedCommandsNV; } + table->CmdBindPipelineShaderGroupNV = (PFN_vkCmdBindPipelineShaderGroupNV) gpa(device, "vkCmdBindPipelineShaderGroupNV"); + if (table->CmdBindPipelineShaderGroupNV == nullptr) { table->CmdBindPipelineShaderGroupNV = (PFN_vkCmdBindPipelineShaderGroupNV)StubCmdBindPipelineShaderGroupNV; } + table->CreateIndirectCommandsLayoutNV = (PFN_vkCreateIndirectCommandsLayoutNV) gpa(device, "vkCreateIndirectCommandsLayoutNV"); + if (table->CreateIndirectCommandsLayoutNV == nullptr) { table->CreateIndirectCommandsLayoutNV = (PFN_vkCreateIndirectCommandsLayoutNV)StubCreateIndirectCommandsLayoutNV; } + table->DestroyIndirectCommandsLayoutNV = (PFN_vkDestroyIndirectCommandsLayoutNV) gpa(device, "vkDestroyIndirectCommandsLayoutNV"); + if (table->DestroyIndirectCommandsLayoutNV == nullptr) { table->DestroyIndirectCommandsLayoutNV = (PFN_vkDestroyIndirectCommandsLayoutNV)StubDestroyIndirectCommandsLayoutNV; } + table->CreatePrivateDataSlotEXT = (PFN_vkCreatePrivateDataSlotEXT) gpa(device, "vkCreatePrivateDataSlotEXT"); + if (table->CreatePrivateDataSlotEXT == nullptr) { table->CreatePrivateDataSlotEXT = (PFN_vkCreatePrivateDataSlotEXT)StubCreatePrivateDataSlotEXT; } + table->DestroyPrivateDataSlotEXT = (PFN_vkDestroyPrivateDataSlotEXT) gpa(device, "vkDestroyPrivateDataSlotEXT"); + if (table->DestroyPrivateDataSlotEXT == nullptr) { table->DestroyPrivateDataSlotEXT = (PFN_vkDestroyPrivateDataSlotEXT)StubDestroyPrivateDataSlotEXT; } + table->SetPrivateDataEXT = (PFN_vkSetPrivateDataEXT) gpa(device, "vkSetPrivateDataEXT"); + if (table->SetPrivateDataEXT == nullptr) { table->SetPrivateDataEXT = (PFN_vkSetPrivateDataEXT)StubSetPrivateDataEXT; } + table->GetPrivateDataEXT = (PFN_vkGetPrivateDataEXT) gpa(device, "vkGetPrivateDataEXT"); + if (table->GetPrivateDataEXT == nullptr) { table->GetPrivateDataEXT = (PFN_vkGetPrivateDataEXT)StubGetPrivateDataEXT; } +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CreateAccelerationStructureKHR = (PFN_vkCreateAccelerationStructureKHR) gpa(device, 
"vkCreateAccelerationStructureKHR"); + if (table->CreateAccelerationStructureKHR == nullptr) { table->CreateAccelerationStructureKHR = (PFN_vkCreateAccelerationStructureKHR)StubCreateAccelerationStructureKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->GetAccelerationStructureMemoryRequirementsKHR = (PFN_vkGetAccelerationStructureMemoryRequirementsKHR) gpa(device, "vkGetAccelerationStructureMemoryRequirementsKHR"); + if (table->GetAccelerationStructureMemoryRequirementsKHR == nullptr) { table->GetAccelerationStructureMemoryRequirementsKHR = (PFN_vkGetAccelerationStructureMemoryRequirementsKHR)StubGetAccelerationStructureMemoryRequirementsKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdBuildAccelerationStructureKHR = (PFN_vkCmdBuildAccelerationStructureKHR) gpa(device, "vkCmdBuildAccelerationStructureKHR"); + if (table->CmdBuildAccelerationStructureKHR == nullptr) { table->CmdBuildAccelerationStructureKHR = (PFN_vkCmdBuildAccelerationStructureKHR)StubCmdBuildAccelerationStructureKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdBuildAccelerationStructureIndirectKHR = (PFN_vkCmdBuildAccelerationStructureIndirectKHR) gpa(device, "vkCmdBuildAccelerationStructureIndirectKHR"); + if (table->CmdBuildAccelerationStructureIndirectKHR == nullptr) { table->CmdBuildAccelerationStructureIndirectKHR = (PFN_vkCmdBuildAccelerationStructureIndirectKHR)StubCmdBuildAccelerationStructureIndirectKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->BuildAccelerationStructureKHR = (PFN_vkBuildAccelerationStructureKHR) gpa(device, "vkBuildAccelerationStructureKHR"); + if (table->BuildAccelerationStructureKHR == nullptr) { table->BuildAccelerationStructureKHR = (PFN_vkBuildAccelerationStructureKHR)StubBuildAccelerationStructureKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CopyAccelerationStructureKHR = (PFN_vkCopyAccelerationStructureKHR) gpa(device, "vkCopyAccelerationStructureKHR"); + if (table->CopyAccelerationStructureKHR == nullptr) { table->CopyAccelerationStructureKHR = (PFN_vkCopyAccelerationStructureKHR)StubCopyAccelerationStructureKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CopyAccelerationStructureToMemoryKHR = (PFN_vkCopyAccelerationStructureToMemoryKHR) gpa(device, "vkCopyAccelerationStructureToMemoryKHR"); + if (table->CopyAccelerationStructureToMemoryKHR == nullptr) { table->CopyAccelerationStructureToMemoryKHR = (PFN_vkCopyAccelerationStructureToMemoryKHR)StubCopyAccelerationStructureToMemoryKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CopyMemoryToAccelerationStructureKHR = (PFN_vkCopyMemoryToAccelerationStructureKHR) gpa(device, "vkCopyMemoryToAccelerationStructureKHR"); + if (table->CopyMemoryToAccelerationStructureKHR == nullptr) { table->CopyMemoryToAccelerationStructureKHR = (PFN_vkCopyMemoryToAccelerationStructureKHR)StubCopyMemoryToAccelerationStructureKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->WriteAccelerationStructuresPropertiesKHR = (PFN_vkWriteAccelerationStructuresPropertiesKHR) gpa(device, "vkWriteAccelerationStructuresPropertiesKHR"); + if (table->WriteAccelerationStructuresPropertiesKHR == nullptr) { table->WriteAccelerationStructuresPropertiesKHR = (PFN_vkWriteAccelerationStructuresPropertiesKHR)StubWriteAccelerationStructuresPropertiesKHR; } +#endif // 
VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdCopyAccelerationStructureKHR = (PFN_vkCmdCopyAccelerationStructureKHR) gpa(device, "vkCmdCopyAccelerationStructureKHR"); + if (table->CmdCopyAccelerationStructureKHR == nullptr) { table->CmdCopyAccelerationStructureKHR = (PFN_vkCmdCopyAccelerationStructureKHR)StubCmdCopyAccelerationStructureKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdCopyAccelerationStructureToMemoryKHR = (PFN_vkCmdCopyAccelerationStructureToMemoryKHR) gpa(device, "vkCmdCopyAccelerationStructureToMemoryKHR"); + if (table->CmdCopyAccelerationStructureToMemoryKHR == nullptr) { table->CmdCopyAccelerationStructureToMemoryKHR = (PFN_vkCmdCopyAccelerationStructureToMemoryKHR)StubCmdCopyAccelerationStructureToMemoryKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdCopyMemoryToAccelerationStructureKHR = (PFN_vkCmdCopyMemoryToAccelerationStructureKHR) gpa(device, "vkCmdCopyMemoryToAccelerationStructureKHR"); + if (table->CmdCopyMemoryToAccelerationStructureKHR == nullptr) { table->CmdCopyMemoryToAccelerationStructureKHR = (PFN_vkCmdCopyMemoryToAccelerationStructureKHR)StubCmdCopyMemoryToAccelerationStructureKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdTraceRaysKHR = (PFN_vkCmdTraceRaysKHR) gpa(device, "vkCmdTraceRaysKHR"); + if (table->CmdTraceRaysKHR == nullptr) { table->CmdTraceRaysKHR = (PFN_vkCmdTraceRaysKHR)StubCmdTraceRaysKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CreateRayTracingPipelinesKHR = (PFN_vkCreateRayTracingPipelinesKHR) gpa(device, "vkCreateRayTracingPipelinesKHR"); + if (table->CreateRayTracingPipelinesKHR == nullptr) { table->CreateRayTracingPipelinesKHR = (PFN_vkCreateRayTracingPipelinesKHR)StubCreateRayTracingPipelinesKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->GetAccelerationStructureDeviceAddressKHR = (PFN_vkGetAccelerationStructureDeviceAddressKHR) gpa(device, "vkGetAccelerationStructureDeviceAddressKHR"); + if (table->GetAccelerationStructureDeviceAddressKHR == nullptr) { table->GetAccelerationStructureDeviceAddressKHR = (PFN_vkGetAccelerationStructureDeviceAddressKHR)StubGetAccelerationStructureDeviceAddressKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->GetRayTracingCaptureReplayShaderGroupHandlesKHR = (PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR) gpa(device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR"); + if (table->GetRayTracingCaptureReplayShaderGroupHandlesKHR == nullptr) { table->GetRayTracingCaptureReplayShaderGroupHandlesKHR = (PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)StubGetRayTracingCaptureReplayShaderGroupHandlesKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdTraceRaysIndirectKHR = (PFN_vkCmdTraceRaysIndirectKHR) gpa(device, "vkCmdTraceRaysIndirectKHR"); + if (table->CmdTraceRaysIndirectKHR == nullptr) { table->CmdTraceRaysIndirectKHR = (PFN_vkCmdTraceRaysIndirectKHR)StubCmdTraceRaysIndirectKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->GetDeviceAccelerationStructureCompatibilityKHR = (PFN_vkGetDeviceAccelerationStructureCompatibilityKHR) gpa(device, "vkGetDeviceAccelerationStructureCompatibilityKHR"); + if (table->GetDeviceAccelerationStructureCompatibilityKHR == nullptr) { table->GetDeviceAccelerationStructureCompatibilityKHR = 
(PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)StubGetDeviceAccelerationStructureCompatibilityKHR; } +#endif // VK_ENABLE_BETA_EXTENSIONS } @@ -756,7 +992,6 @@ static inline void layer_init_instance_dispatch_table(VkInstance instance, VkLay #ifdef VK_USE_PLATFORM_VI_NN table->CreateViSurfaceNN = (PFN_vkCreateViSurfaceNN) gpa(instance, "vkCreateViSurfaceNN"); #endif // VK_USE_PLATFORM_VI_NN - table->GetPhysicalDeviceGeneratedCommandsPropertiesNVX = (PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX) gpa(instance, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX"); table->ReleaseDisplayEXT = (PFN_vkReleaseDisplayEXT) gpa(instance, "vkReleaseDisplayEXT"); #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT table->AcquireXlibDisplayEXT = (PFN_vkAcquireXlibDisplayEXT) gpa(instance, "vkAcquireXlibDisplayEXT"); @@ -789,4 +1024,10 @@ static inline void layer_init_instance_dispatch_table(VkInstance instance, VkLay table->GetPhysicalDeviceSurfacePresentModes2EXT = (PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT) gpa(instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT"); #endif // VK_USE_PLATFORM_WIN32_KHR table->CreateHeadlessSurfaceEXT = (PFN_vkCreateHeadlessSurfaceEXT) gpa(instance, "vkCreateHeadlessSurfaceEXT"); +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + table->CreateDirectFBSurfaceEXT = (PFN_vkCreateDirectFBSurfaceEXT) gpa(instance, "vkCreateDirectFBSurfaceEXT"); +#endif // VK_USE_PLATFORM_DIRECTFB_EXT +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + table->GetPhysicalDeviceDirectFBPresentationSupportEXT = (PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT) gpa(instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT"); +#endif // VK_USE_PLATFORM_DIRECTFB_EXT } diff --git a/thirdparty/vulkan/loader/vk_layer_dispatch_table.h b/thirdparty/vulkan/loader/vk_layer_dispatch_table.h index b919447e553..d774098d660 100644 --- a/thirdparty/vulkan/loader/vk_layer_dispatch_table.h +++ b/thirdparty/vulkan/loader/vk_layer_dispatch_table.h @@ -172,9 +172,6 @@ typedef struct VkLayerInstanceDispatchTable_ { PFN_vkCreateViSurfaceNN CreateViSurfaceNN; #endif // VK_USE_PLATFORM_VI_NN - // ---- VK_NVX_device_generated_commands extension commands - PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX GetPhysicalDeviceGeneratedCommandsPropertiesNVX; - // ---- VK_EXT_direct_mode_display extension commands PFN_vkReleaseDisplayEXT ReleaseDisplayEXT; @@ -236,6 +233,14 @@ typedef struct VkLayerInstanceDispatchTable_ { // ---- VK_EXT_headless_surface extension commands PFN_vkCreateHeadlessSurfaceEXT CreateHeadlessSurfaceEXT; + + // ---- VK_EXT_directfb_surface extension commands +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + PFN_vkCreateDirectFBSurfaceEXT CreateDirectFBSurfaceEXT; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT GetPhysicalDeviceDirectFBPresentationSupportEXT; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT } VkLayerInstanceDispatchTable; // Device function pointer dispatch table @@ -506,11 +511,36 @@ typedef struct VkLayerDispatchTable_ { PFN_vkGetBufferOpaqueCaptureAddressKHR GetBufferOpaqueCaptureAddressKHR; PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR GetDeviceMemoryOpaqueCaptureAddressKHR; + // ---- VK_KHR_deferred_host_operations extension commands +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCreateDeferredOperationKHR CreateDeferredOperationKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkDestroyDeferredOperationKHR DestroyDeferredOperationKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef 
VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetDeferredOperationMaxConcurrencyKHR GetDeferredOperationMaxConcurrencyKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetDeferredOperationResultKHR GetDeferredOperationResultKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkDeferredOperationJoinKHR DeferredOperationJoinKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS + // ---- VK_KHR_pipeline_executable_properties extension commands PFN_vkGetPipelineExecutablePropertiesKHR GetPipelineExecutablePropertiesKHR; PFN_vkGetPipelineExecutableStatisticsKHR GetPipelineExecutableStatisticsKHR; PFN_vkGetPipelineExecutableInternalRepresentationsKHR GetPipelineExecutableInternalRepresentationsKHR; + // ---- VK_KHR_copy_commands2 extension commands + PFN_vkCmdCopyBuffer2KHR CmdCopyBuffer2KHR; + PFN_vkCmdCopyImage2KHR CmdCopyImage2KHR; + PFN_vkCmdCopyBufferToImage2KHR CmdCopyBufferToImage2KHR; + PFN_vkCmdCopyImageToBuffer2KHR CmdCopyImageToBuffer2KHR; + PFN_vkCmdBlitImage2KHR CmdBlitImage2KHR; + PFN_vkCmdResolveImage2KHR CmdResolveImage2KHR; + // ---- VK_EXT_debug_marker extension commands PFN_vkDebugMarkerSetObjectTagEXT DebugMarkerSetObjectTagEXT; PFN_vkDebugMarkerSetObjectNameEXT DebugMarkerSetObjectNameEXT; @@ -528,6 +558,7 @@ typedef struct VkLayerDispatchTable_ { // ---- VK_NVX_image_view_handle extension commands PFN_vkGetImageViewHandleNVX GetImageViewHandleNVX; + PFN_vkGetImageViewAddressNVX GetImageViewAddressNVX; // ---- VK_AMD_draw_indirect_count extension commands PFN_vkCmdDrawIndirectCountAMD CmdDrawIndirectCountAMD; @@ -545,16 +576,6 @@ typedef struct VkLayerDispatchTable_ { PFN_vkCmdBeginConditionalRenderingEXT CmdBeginConditionalRenderingEXT; PFN_vkCmdEndConditionalRenderingEXT CmdEndConditionalRenderingEXT; - // ---- VK_NVX_device_generated_commands extension commands - PFN_vkCmdProcessCommandsNVX CmdProcessCommandsNVX; - PFN_vkCmdReserveSpaceForCommandsNVX CmdReserveSpaceForCommandsNVX; - PFN_vkCreateIndirectCommandsLayoutNVX CreateIndirectCommandsLayoutNVX; - PFN_vkDestroyIndirectCommandsLayoutNVX DestroyIndirectCommandsLayoutNVX; - PFN_vkCreateObjectTableNVX CreateObjectTableNVX; - PFN_vkDestroyObjectTableNVX DestroyObjectTableNVX; - PFN_vkRegisterObjectsNVX RegisterObjectsNVX; - PFN_vkUnregisterObjectsNVX UnregisterObjectsNVX; - // ---- VK_NV_clip_space_w_scaling extension commands PFN_vkCmdSetViewportWScalingNV CmdSetViewportWScalingNV; @@ -611,15 +632,19 @@ typedef struct VkLayerDispatchTable_ { // ---- VK_NV_ray_tracing extension commands PFN_vkCreateAccelerationStructureNV CreateAccelerationStructureNV; + PFN_vkDestroyAccelerationStructureKHR DestroyAccelerationStructureKHR; PFN_vkDestroyAccelerationStructureNV DestroyAccelerationStructureNV; PFN_vkGetAccelerationStructureMemoryRequirementsNV GetAccelerationStructureMemoryRequirementsNV; + PFN_vkBindAccelerationStructureMemoryKHR BindAccelerationStructureMemoryKHR; PFN_vkBindAccelerationStructureMemoryNV BindAccelerationStructureMemoryNV; PFN_vkCmdBuildAccelerationStructureNV CmdBuildAccelerationStructureNV; PFN_vkCmdCopyAccelerationStructureNV CmdCopyAccelerationStructureNV; PFN_vkCmdTraceRaysNV CmdTraceRaysNV; PFN_vkCreateRayTracingPipelinesNV CreateRayTracingPipelinesNV; + PFN_vkGetRayTracingShaderGroupHandlesKHR GetRayTracingShaderGroupHandlesKHR; PFN_vkGetRayTracingShaderGroupHandlesNV GetRayTracingShaderGroupHandlesNV; PFN_vkGetAccelerationStructureHandleNV GetAccelerationStructureHandleNV; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR 
CmdWriteAccelerationStructuresPropertiesKHR; PFN_vkCmdWriteAccelerationStructuresPropertiesNV CmdWriteAccelerationStructuresPropertiesNV; PFN_vkCompileDeferredNV CompileDeferredNV; @@ -677,6 +702,90 @@ typedef struct VkLayerDispatchTable_ { // ---- VK_EXT_host_query_reset extension commands PFN_vkResetQueryPoolEXT ResetQueryPoolEXT; + + // ---- VK_EXT_extended_dynamic_state extension commands + PFN_vkCmdSetCullModeEXT CmdSetCullModeEXT; + PFN_vkCmdSetFrontFaceEXT CmdSetFrontFaceEXT; + PFN_vkCmdSetPrimitiveTopologyEXT CmdSetPrimitiveTopologyEXT; + PFN_vkCmdSetViewportWithCountEXT CmdSetViewportWithCountEXT; + PFN_vkCmdSetScissorWithCountEXT CmdSetScissorWithCountEXT; + PFN_vkCmdBindVertexBuffers2EXT CmdBindVertexBuffers2EXT; + PFN_vkCmdSetDepthTestEnableEXT CmdSetDepthTestEnableEXT; + PFN_vkCmdSetDepthWriteEnableEXT CmdSetDepthWriteEnableEXT; + PFN_vkCmdSetDepthCompareOpEXT CmdSetDepthCompareOpEXT; + PFN_vkCmdSetDepthBoundsTestEnableEXT CmdSetDepthBoundsTestEnableEXT; + PFN_vkCmdSetStencilTestEnableEXT CmdSetStencilTestEnableEXT; + PFN_vkCmdSetStencilOpEXT CmdSetStencilOpEXT; + + // ---- VK_NV_device_generated_commands extension commands + PFN_vkGetGeneratedCommandsMemoryRequirementsNV GetGeneratedCommandsMemoryRequirementsNV; + PFN_vkCmdPreprocessGeneratedCommandsNV CmdPreprocessGeneratedCommandsNV; + PFN_vkCmdExecuteGeneratedCommandsNV CmdExecuteGeneratedCommandsNV; + PFN_vkCmdBindPipelineShaderGroupNV CmdBindPipelineShaderGroupNV; + PFN_vkCreateIndirectCommandsLayoutNV CreateIndirectCommandsLayoutNV; + PFN_vkDestroyIndirectCommandsLayoutNV DestroyIndirectCommandsLayoutNV; + + // ---- VK_EXT_private_data extension commands + PFN_vkCreatePrivateDataSlotEXT CreatePrivateDataSlotEXT; + PFN_vkDestroyPrivateDataSlotEXT DestroyPrivateDataSlotEXT; + PFN_vkSetPrivateDataEXT SetPrivateDataEXT; + PFN_vkGetPrivateDataEXT GetPrivateDataEXT; + + // ---- VK_KHR_ray_tracing extension commands +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCreateAccelerationStructureKHR CreateAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetAccelerationStructureMemoryRequirementsKHR GetAccelerationStructureMemoryRequirementsKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdBuildAccelerationStructureKHR CmdBuildAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdBuildAccelerationStructureIndirectKHR CmdBuildAccelerationStructureIndirectKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkBuildAccelerationStructureKHR BuildAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCopyAccelerationStructureKHR CopyAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCopyAccelerationStructureToMemoryKHR CopyAccelerationStructureToMemoryKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCopyMemoryToAccelerationStructureKHR CopyMemoryToAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkWriteAccelerationStructuresPropertiesKHR WriteAccelerationStructuresPropertiesKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdCopyAccelerationStructureKHR CmdCopyAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdCopyAccelerationStructureToMemoryKHR CmdCopyAccelerationStructureToMemoryKHR; 
+#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdCopyMemoryToAccelerationStructureKHR CmdCopyMemoryToAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdTraceRaysKHR CmdTraceRaysKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCreateRayTracingPipelinesKHR CreateRayTracingPipelinesKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetAccelerationStructureDeviceAddressKHR GetAccelerationStructureDeviceAddressKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR GetRayTracingCaptureReplayShaderGroupHandlesKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkCmdTraceRaysIndirectKHR CmdTraceRaysIndirectKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR GetDeviceAccelerationStructureCompatibilityKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS } VkLayerDispatchTable; diff --git a/thirdparty/vulkan/loader/vk_loader_extensions.c b/thirdparty/vulkan/loader/vk_loader_extensions.c index 542b8b4af41..72d40272818 100644 --- a/thirdparty/vulkan/loader/vk_loader_extensions.c +++ b/thirdparty/vulkan/loader/vk_loader_extensions.c @@ -208,9 +208,6 @@ VKAPI_ATTR bool VKAPI_CALL loader_icd_init_entries(struct loader_icd_term *icd_t LOOKUP_GIPA(CreateViSurfaceNN, false); #endif // VK_USE_PLATFORM_VI_NN - // ---- VK_NVX_device_generated_commands extension commands - LOOKUP_GIPA(GetPhysicalDeviceGeneratedCommandsPropertiesNVX, false); - // ---- VK_EXT_direct_mode_display extension commands LOOKUP_GIPA(ReleaseDisplayEXT, false); @@ -284,6 +281,14 @@ VKAPI_ATTR bool VKAPI_CALL loader_icd_init_entries(struct loader_icd_term *icd_t // ---- VK_EXT_headless_surface extension commands LOOKUP_GIPA(CreateHeadlessSurfaceEXT, false); + // ---- VK_EXT_directfb_surface extension commands +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + LOOKUP_GIPA(CreateDirectFBSurfaceEXT, false); +#endif // VK_USE_PLATFORM_DIRECTFB_EXT +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + LOOKUP_GIPA(GetPhysicalDeviceDirectFBPresentationSupportEXT, false); +#endif // VK_USE_PLATFORM_DIRECTFB_EXT + #undef LOOKUP_GIPA return true; @@ -569,11 +574,36 @@ VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct lo table->GetBufferOpaqueCaptureAddressKHR = (PFN_vkGetBufferOpaqueCaptureAddressKHR)gdpa(dev, "vkGetBufferOpaqueCaptureAddressKHR"); table->GetDeviceMemoryOpaqueCaptureAddressKHR = (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)gdpa(dev, "vkGetDeviceMemoryOpaqueCaptureAddressKHR"); + // ---- VK_KHR_deferred_host_operations extension commands +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CreateDeferredOperationKHR = (PFN_vkCreateDeferredOperationKHR)gdpa(dev, "vkCreateDeferredOperationKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->DestroyDeferredOperationKHR = (PFN_vkDestroyDeferredOperationKHR)gdpa(dev, "vkDestroyDeferredOperationKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->GetDeferredOperationMaxConcurrencyKHR = (PFN_vkGetDeferredOperationMaxConcurrencyKHR)gdpa(dev, "vkGetDeferredOperationMaxConcurrencyKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->GetDeferredOperationResultKHR = (PFN_vkGetDeferredOperationResultKHR)gdpa(dev, "vkGetDeferredOperationResultKHR"); +#endif // 
VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->DeferredOperationJoinKHR = (PFN_vkDeferredOperationJoinKHR)gdpa(dev, "vkDeferredOperationJoinKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS + // ---- VK_KHR_pipeline_executable_properties extension commands table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR)gdpa(dev, "vkGetPipelineExecutablePropertiesKHR"); table->GetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR)gdpa(dev, "vkGetPipelineExecutableStatisticsKHR"); table->GetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR)gdpa(dev, "vkGetPipelineExecutableInternalRepresentationsKHR"); + // ---- VK_KHR_copy_commands2 extension commands + table->CmdCopyBuffer2KHR = (PFN_vkCmdCopyBuffer2KHR)gdpa(dev, "vkCmdCopyBuffer2KHR"); + table->CmdCopyImage2KHR = (PFN_vkCmdCopyImage2KHR)gdpa(dev, "vkCmdCopyImage2KHR"); + table->CmdCopyBufferToImage2KHR = (PFN_vkCmdCopyBufferToImage2KHR)gdpa(dev, "vkCmdCopyBufferToImage2KHR"); + table->CmdCopyImageToBuffer2KHR = (PFN_vkCmdCopyImageToBuffer2KHR)gdpa(dev, "vkCmdCopyImageToBuffer2KHR"); + table->CmdBlitImage2KHR = (PFN_vkCmdBlitImage2KHR)gdpa(dev, "vkCmdBlitImage2KHR"); + table->CmdResolveImage2KHR = (PFN_vkCmdResolveImage2KHR)gdpa(dev, "vkCmdResolveImage2KHR"); + // ---- VK_EXT_debug_marker extension commands table->DebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)gdpa(dev, "vkDebugMarkerSetObjectTagEXT"); table->DebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT)gdpa(dev, "vkDebugMarkerSetObjectNameEXT"); @@ -591,6 +621,7 @@ VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct lo // ---- VK_NVX_image_view_handle extension commands table->GetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX)gdpa(dev, "vkGetImageViewHandleNVX"); + table->GetImageViewAddressNVX = (PFN_vkGetImageViewAddressNVX)gdpa(dev, "vkGetImageViewAddressNVX"); // ---- VK_AMD_draw_indirect_count extension commands table->CmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD)gdpa(dev, "vkCmdDrawIndirectCountAMD"); @@ -608,16 +639,6 @@ VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct lo table->CmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT)gdpa(dev, "vkCmdBeginConditionalRenderingEXT"); table->CmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT)gdpa(dev, "vkCmdEndConditionalRenderingEXT"); - // ---- VK_NVX_device_generated_commands extension commands - table->CmdProcessCommandsNVX = (PFN_vkCmdProcessCommandsNVX)gdpa(dev, "vkCmdProcessCommandsNVX"); - table->CmdReserveSpaceForCommandsNVX = (PFN_vkCmdReserveSpaceForCommandsNVX)gdpa(dev, "vkCmdReserveSpaceForCommandsNVX"); - table->CreateIndirectCommandsLayoutNVX = (PFN_vkCreateIndirectCommandsLayoutNVX)gdpa(dev, "vkCreateIndirectCommandsLayoutNVX"); - table->DestroyIndirectCommandsLayoutNVX = (PFN_vkDestroyIndirectCommandsLayoutNVX)gdpa(dev, "vkDestroyIndirectCommandsLayoutNVX"); - table->CreateObjectTableNVX = (PFN_vkCreateObjectTableNVX)gdpa(dev, "vkCreateObjectTableNVX"); - table->DestroyObjectTableNVX = (PFN_vkDestroyObjectTableNVX)gdpa(dev, "vkDestroyObjectTableNVX"); - table->RegisterObjectsNVX = (PFN_vkRegisterObjectsNVX)gdpa(dev, "vkRegisterObjectsNVX"); - table->UnregisterObjectsNVX = (PFN_vkUnregisterObjectsNVX)gdpa(dev, "vkUnregisterObjectsNVX"); - // ---- VK_NV_clip_space_w_scaling extension commands table->CmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV)gdpa(dev, 
"vkCmdSetViewportWScalingNV"); @@ -674,15 +695,19 @@ VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct lo // ---- VK_NV_ray_tracing extension commands table->CreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV)gdpa(dev, "vkCreateAccelerationStructureNV"); + table->DestroyAccelerationStructureKHR = (PFN_vkDestroyAccelerationStructureKHR)gdpa(dev, "vkDestroyAccelerationStructureKHR"); table->DestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV)gdpa(dev, "vkDestroyAccelerationStructureNV"); table->GetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV)gdpa(dev, "vkGetAccelerationStructureMemoryRequirementsNV"); + table->BindAccelerationStructureMemoryKHR = (PFN_vkBindAccelerationStructureMemoryKHR)gdpa(dev, "vkBindAccelerationStructureMemoryKHR"); table->BindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV)gdpa(dev, "vkBindAccelerationStructureMemoryNV"); table->CmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV)gdpa(dev, "vkCmdBuildAccelerationStructureNV"); table->CmdCopyAccelerationStructureNV = (PFN_vkCmdCopyAccelerationStructureNV)gdpa(dev, "vkCmdCopyAccelerationStructureNV"); table->CmdTraceRaysNV = (PFN_vkCmdTraceRaysNV)gdpa(dev, "vkCmdTraceRaysNV"); table->CreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV)gdpa(dev, "vkCreateRayTracingPipelinesNV"); + table->GetRayTracingShaderGroupHandlesKHR = (PFN_vkGetRayTracingShaderGroupHandlesKHR)gdpa(dev, "vkGetRayTracingShaderGroupHandlesKHR"); table->GetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV)gdpa(dev, "vkGetRayTracingShaderGroupHandlesNV"); table->GetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV)gdpa(dev, "vkGetAccelerationStructureHandleNV"); + table->CmdWriteAccelerationStructuresPropertiesKHR = (PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)gdpa(dev, "vkCmdWriteAccelerationStructuresPropertiesKHR"); table->CmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV)gdpa(dev, "vkCmdWriteAccelerationStructuresPropertiesNV"); table->CompileDeferredNV = (PFN_vkCompileDeferredNV)gdpa(dev, "vkCompileDeferredNV"); @@ -740,6 +765,90 @@ VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct lo // ---- VK_EXT_host_query_reset extension commands table->ResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)gdpa(dev, "vkResetQueryPoolEXT"); + + // ---- VK_EXT_extended_dynamic_state extension commands + table->CmdSetCullModeEXT = (PFN_vkCmdSetCullModeEXT)gdpa(dev, "vkCmdSetCullModeEXT"); + table->CmdSetFrontFaceEXT = (PFN_vkCmdSetFrontFaceEXT)gdpa(dev, "vkCmdSetFrontFaceEXT"); + table->CmdSetPrimitiveTopologyEXT = (PFN_vkCmdSetPrimitiveTopologyEXT)gdpa(dev, "vkCmdSetPrimitiveTopologyEXT"); + table->CmdSetViewportWithCountEXT = (PFN_vkCmdSetViewportWithCountEXT)gdpa(dev, "vkCmdSetViewportWithCountEXT"); + table->CmdSetScissorWithCountEXT = (PFN_vkCmdSetScissorWithCountEXT)gdpa(dev, "vkCmdSetScissorWithCountEXT"); + table->CmdBindVertexBuffers2EXT = (PFN_vkCmdBindVertexBuffers2EXT)gdpa(dev, "vkCmdBindVertexBuffers2EXT"); + table->CmdSetDepthTestEnableEXT = (PFN_vkCmdSetDepthTestEnableEXT)gdpa(dev, "vkCmdSetDepthTestEnableEXT"); + table->CmdSetDepthWriteEnableEXT = (PFN_vkCmdSetDepthWriteEnableEXT)gdpa(dev, "vkCmdSetDepthWriteEnableEXT"); + table->CmdSetDepthCompareOpEXT = (PFN_vkCmdSetDepthCompareOpEXT)gdpa(dev, "vkCmdSetDepthCompareOpEXT"); + 
table->CmdSetDepthBoundsTestEnableEXT = (PFN_vkCmdSetDepthBoundsTestEnableEXT)gdpa(dev, "vkCmdSetDepthBoundsTestEnableEXT"); + table->CmdSetStencilTestEnableEXT = (PFN_vkCmdSetStencilTestEnableEXT)gdpa(dev, "vkCmdSetStencilTestEnableEXT"); + table->CmdSetStencilOpEXT = (PFN_vkCmdSetStencilOpEXT)gdpa(dev, "vkCmdSetStencilOpEXT"); + + // ---- VK_NV_device_generated_commands extension commands + table->GetGeneratedCommandsMemoryRequirementsNV = (PFN_vkGetGeneratedCommandsMemoryRequirementsNV)gdpa(dev, "vkGetGeneratedCommandsMemoryRequirementsNV"); + table->CmdPreprocessGeneratedCommandsNV = (PFN_vkCmdPreprocessGeneratedCommandsNV)gdpa(dev, "vkCmdPreprocessGeneratedCommandsNV"); + table->CmdExecuteGeneratedCommandsNV = (PFN_vkCmdExecuteGeneratedCommandsNV)gdpa(dev, "vkCmdExecuteGeneratedCommandsNV"); + table->CmdBindPipelineShaderGroupNV = (PFN_vkCmdBindPipelineShaderGroupNV)gdpa(dev, "vkCmdBindPipelineShaderGroupNV"); + table->CreateIndirectCommandsLayoutNV = (PFN_vkCreateIndirectCommandsLayoutNV)gdpa(dev, "vkCreateIndirectCommandsLayoutNV"); + table->DestroyIndirectCommandsLayoutNV = (PFN_vkDestroyIndirectCommandsLayoutNV)gdpa(dev, "vkDestroyIndirectCommandsLayoutNV"); + + // ---- VK_EXT_private_data extension commands + table->CreatePrivateDataSlotEXT = (PFN_vkCreatePrivateDataSlotEXT)gdpa(dev, "vkCreatePrivateDataSlotEXT"); + table->DestroyPrivateDataSlotEXT = (PFN_vkDestroyPrivateDataSlotEXT)gdpa(dev, "vkDestroyPrivateDataSlotEXT"); + table->SetPrivateDataEXT = (PFN_vkSetPrivateDataEXT)gdpa(dev, "vkSetPrivateDataEXT"); + table->GetPrivateDataEXT = (PFN_vkGetPrivateDataEXT)gdpa(dev, "vkGetPrivateDataEXT"); + + // ---- VK_KHR_ray_tracing extension commands +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CreateAccelerationStructureKHR = (PFN_vkCreateAccelerationStructureKHR)gdpa(dev, "vkCreateAccelerationStructureKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->GetAccelerationStructureMemoryRequirementsKHR = (PFN_vkGetAccelerationStructureMemoryRequirementsKHR)gdpa(dev, "vkGetAccelerationStructureMemoryRequirementsKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdBuildAccelerationStructureKHR = (PFN_vkCmdBuildAccelerationStructureKHR)gdpa(dev, "vkCmdBuildAccelerationStructureKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdBuildAccelerationStructureIndirectKHR = (PFN_vkCmdBuildAccelerationStructureIndirectKHR)gdpa(dev, "vkCmdBuildAccelerationStructureIndirectKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->BuildAccelerationStructureKHR = (PFN_vkBuildAccelerationStructureKHR)gdpa(dev, "vkBuildAccelerationStructureKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CopyAccelerationStructureKHR = (PFN_vkCopyAccelerationStructureKHR)gdpa(dev, "vkCopyAccelerationStructureKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CopyAccelerationStructureToMemoryKHR = (PFN_vkCopyAccelerationStructureToMemoryKHR)gdpa(dev, "vkCopyAccelerationStructureToMemoryKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CopyMemoryToAccelerationStructureKHR = (PFN_vkCopyMemoryToAccelerationStructureKHR)gdpa(dev, "vkCopyMemoryToAccelerationStructureKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->WriteAccelerationStructuresPropertiesKHR = (PFN_vkWriteAccelerationStructuresPropertiesKHR)gdpa(dev, 
"vkWriteAccelerationStructuresPropertiesKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdCopyAccelerationStructureKHR = (PFN_vkCmdCopyAccelerationStructureKHR)gdpa(dev, "vkCmdCopyAccelerationStructureKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdCopyAccelerationStructureToMemoryKHR = (PFN_vkCmdCopyAccelerationStructureToMemoryKHR)gdpa(dev, "vkCmdCopyAccelerationStructureToMemoryKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdCopyMemoryToAccelerationStructureKHR = (PFN_vkCmdCopyMemoryToAccelerationStructureKHR)gdpa(dev, "vkCmdCopyMemoryToAccelerationStructureKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdTraceRaysKHR = (PFN_vkCmdTraceRaysKHR)gdpa(dev, "vkCmdTraceRaysKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CreateRayTracingPipelinesKHR = (PFN_vkCreateRayTracingPipelinesKHR)gdpa(dev, "vkCreateRayTracingPipelinesKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->GetAccelerationStructureDeviceAddressKHR = (PFN_vkGetAccelerationStructureDeviceAddressKHR)gdpa(dev, "vkGetAccelerationStructureDeviceAddressKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->GetRayTracingCaptureReplayShaderGroupHandlesKHR = (PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)gdpa(dev, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->CmdTraceRaysIndirectKHR = (PFN_vkCmdTraceRaysIndirectKHR)gdpa(dev, "vkCmdTraceRaysIndirectKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + table->GetDeviceAccelerationStructureCompatibilityKHR = (PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)gdpa(dev, "vkGetDeviceAccelerationStructureCompatibilityKHR"); +#endif // VK_ENABLE_BETA_EXTENSIONS } // Init Instance function pointer dispatch table with core commands @@ -887,9 +996,6 @@ VKAPI_ATTR void VKAPI_CALL loader_init_instance_extension_dispatch_table(VkLayer table->CreateViSurfaceNN = (PFN_vkCreateViSurfaceNN)gpa(inst, "vkCreateViSurfaceNN"); #endif // VK_USE_PLATFORM_VI_NN - // ---- VK_NVX_device_generated_commands extension commands - table->GetPhysicalDeviceGeneratedCommandsPropertiesNVX = (PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)gpa(inst, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX"); - // ---- VK_EXT_direct_mode_display extension commands table->ReleaseDisplayEXT = (PFN_vkReleaseDisplayEXT)gpa(inst, "vkReleaseDisplayEXT"); @@ -951,6 +1057,14 @@ VKAPI_ATTR void VKAPI_CALL loader_init_instance_extension_dispatch_table(VkLayer // ---- VK_EXT_headless_surface extension commands table->CreateHeadlessSurfaceEXT = (PFN_vkCreateHeadlessSurfaceEXT)gpa(inst, "vkCreateHeadlessSurfaceEXT"); + + // ---- VK_EXT_directfb_surface extension commands +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + table->CreateDirectFBSurfaceEXT = (PFN_vkCreateDirectFBSurfaceEXT)gpa(inst, "vkCreateDirectFBSurfaceEXT"); +#endif // VK_USE_PLATFORM_DIRECTFB_EXT +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + table->GetPhysicalDeviceDirectFBPresentationSupportEXT = (PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)gpa(inst, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT"); +#endif // VK_USE_PLATFORM_DIRECTFB_EXT } // Device command lookup function @@ -1224,11 +1338,36 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDis if 
(!strcmp(name, "GetBufferOpaqueCaptureAddressKHR")) return (void *)table->GetBufferOpaqueCaptureAddressKHR; if (!strcmp(name, "GetDeviceMemoryOpaqueCaptureAddressKHR")) return (void *)table->GetDeviceMemoryOpaqueCaptureAddressKHR; + // ---- VK_KHR_deferred_host_operations extension commands +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CreateDeferredOperationKHR")) return (void *)table->CreateDeferredOperationKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "DestroyDeferredOperationKHR")) return (void *)table->DestroyDeferredOperationKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "GetDeferredOperationMaxConcurrencyKHR")) return (void *)table->GetDeferredOperationMaxConcurrencyKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "GetDeferredOperationResultKHR")) return (void *)table->GetDeferredOperationResultKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "DeferredOperationJoinKHR")) return (void *)table->DeferredOperationJoinKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS + // ---- VK_KHR_pipeline_executable_properties extension commands if (!strcmp(name, "GetPipelineExecutablePropertiesKHR")) return (void *)table->GetPipelineExecutablePropertiesKHR; if (!strcmp(name, "GetPipelineExecutableStatisticsKHR")) return (void *)table->GetPipelineExecutableStatisticsKHR; if (!strcmp(name, "GetPipelineExecutableInternalRepresentationsKHR")) return (void *)table->GetPipelineExecutableInternalRepresentationsKHR; + // ---- VK_KHR_copy_commands2 extension commands + if (!strcmp(name, "CmdCopyBuffer2KHR")) return (void *)table->CmdCopyBuffer2KHR; + if (!strcmp(name, "CmdCopyImage2KHR")) return (void *)table->CmdCopyImage2KHR; + if (!strcmp(name, "CmdCopyBufferToImage2KHR")) return (void *)table->CmdCopyBufferToImage2KHR; + if (!strcmp(name, "CmdCopyImageToBuffer2KHR")) return (void *)table->CmdCopyImageToBuffer2KHR; + if (!strcmp(name, "CmdBlitImage2KHR")) return (void *)table->CmdBlitImage2KHR; + if (!strcmp(name, "CmdResolveImage2KHR")) return (void *)table->CmdResolveImage2KHR; + // ---- VK_EXT_debug_marker extension commands if (!strcmp(name, "DebugMarkerSetObjectTagEXT")) return (void *)table->DebugMarkerSetObjectTagEXT; if (!strcmp(name, "DebugMarkerSetObjectNameEXT")) return (void *)table->DebugMarkerSetObjectNameEXT; @@ -1246,6 +1385,7 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDis // ---- VK_NVX_image_view_handle extension commands if (!strcmp(name, "GetImageViewHandleNVX")) return (void *)table->GetImageViewHandleNVX; + if (!strcmp(name, "GetImageViewAddressNVX")) return (void *)table->GetImageViewAddressNVX; // ---- VK_AMD_draw_indirect_count extension commands if (!strcmp(name, "CmdDrawIndirectCountAMD")) return (void *)table->CmdDrawIndirectCountAMD; @@ -1263,16 +1403,6 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDis if (!strcmp(name, "CmdBeginConditionalRenderingEXT")) return (void *)table->CmdBeginConditionalRenderingEXT; if (!strcmp(name, "CmdEndConditionalRenderingEXT")) return (void *)table->CmdEndConditionalRenderingEXT; - // ---- VK_NVX_device_generated_commands extension commands - if (!strcmp(name, "CmdProcessCommandsNVX")) return (void *)table->CmdProcessCommandsNVX; - if (!strcmp(name, "CmdReserveSpaceForCommandsNVX")) return (void *)table->CmdReserveSpaceForCommandsNVX; - if (!strcmp(name, 
"CreateIndirectCommandsLayoutNVX")) return (void *)table->CreateIndirectCommandsLayoutNVX; - if (!strcmp(name, "DestroyIndirectCommandsLayoutNVX")) return (void *)table->DestroyIndirectCommandsLayoutNVX; - if (!strcmp(name, "CreateObjectTableNVX")) return (void *)table->CreateObjectTableNVX; - if (!strcmp(name, "DestroyObjectTableNVX")) return (void *)table->DestroyObjectTableNVX; - if (!strcmp(name, "RegisterObjectsNVX")) return (void *)table->RegisterObjectsNVX; - if (!strcmp(name, "UnregisterObjectsNVX")) return (void *)table->UnregisterObjectsNVX; - // ---- VK_NV_clip_space_w_scaling extension commands if (!strcmp(name, "CmdSetViewportWScalingNV")) return (void *)table->CmdSetViewportWScalingNV; @@ -1329,15 +1459,19 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDis // ---- VK_NV_ray_tracing extension commands if (!strcmp(name, "CreateAccelerationStructureNV")) return (void *)table->CreateAccelerationStructureNV; + if (!strcmp(name, "DestroyAccelerationStructureKHR")) return (void *)table->DestroyAccelerationStructureKHR; if (!strcmp(name, "DestroyAccelerationStructureNV")) return (void *)table->DestroyAccelerationStructureNV; if (!strcmp(name, "GetAccelerationStructureMemoryRequirementsNV")) return (void *)table->GetAccelerationStructureMemoryRequirementsNV; + if (!strcmp(name, "BindAccelerationStructureMemoryKHR")) return (void *)table->BindAccelerationStructureMemoryKHR; if (!strcmp(name, "BindAccelerationStructureMemoryNV")) return (void *)table->BindAccelerationStructureMemoryNV; if (!strcmp(name, "CmdBuildAccelerationStructureNV")) return (void *)table->CmdBuildAccelerationStructureNV; if (!strcmp(name, "CmdCopyAccelerationStructureNV")) return (void *)table->CmdCopyAccelerationStructureNV; if (!strcmp(name, "CmdTraceRaysNV")) return (void *)table->CmdTraceRaysNV; if (!strcmp(name, "CreateRayTracingPipelinesNV")) return (void *)table->CreateRayTracingPipelinesNV; + if (!strcmp(name, "GetRayTracingShaderGroupHandlesKHR")) return (void *)table->GetRayTracingShaderGroupHandlesKHR; if (!strcmp(name, "GetRayTracingShaderGroupHandlesNV")) return (void *)table->GetRayTracingShaderGroupHandlesNV; if (!strcmp(name, "GetAccelerationStructureHandleNV")) return (void *)table->GetAccelerationStructureHandleNV; + if (!strcmp(name, "CmdWriteAccelerationStructuresPropertiesKHR")) return (void *)table->CmdWriteAccelerationStructuresPropertiesKHR; if (!strcmp(name, "CmdWriteAccelerationStructuresPropertiesNV")) return (void *)table->CmdWriteAccelerationStructuresPropertiesNV; if (!strcmp(name, "CompileDeferredNV")) return (void *)table->CompileDeferredNV; @@ -1396,6 +1530,90 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDis // ---- VK_EXT_host_query_reset extension commands if (!strcmp(name, "ResetQueryPoolEXT")) return (void *)table->ResetQueryPoolEXT; + // ---- VK_EXT_extended_dynamic_state extension commands + if (!strcmp(name, "CmdSetCullModeEXT")) return (void *)table->CmdSetCullModeEXT; + if (!strcmp(name, "CmdSetFrontFaceEXT")) return (void *)table->CmdSetFrontFaceEXT; + if (!strcmp(name, "CmdSetPrimitiveTopologyEXT")) return (void *)table->CmdSetPrimitiveTopologyEXT; + if (!strcmp(name, "CmdSetViewportWithCountEXT")) return (void *)table->CmdSetViewportWithCountEXT; + if (!strcmp(name, "CmdSetScissorWithCountEXT")) return (void *)table->CmdSetScissorWithCountEXT; + if (!strcmp(name, "CmdBindVertexBuffers2EXT")) return (void *)table->CmdBindVertexBuffers2EXT; + if (!strcmp(name, "CmdSetDepthTestEnableEXT")) return (void 
*)table->CmdSetDepthTestEnableEXT; + if (!strcmp(name, "CmdSetDepthWriteEnableEXT")) return (void *)table->CmdSetDepthWriteEnableEXT; + if (!strcmp(name, "CmdSetDepthCompareOpEXT")) return (void *)table->CmdSetDepthCompareOpEXT; + if (!strcmp(name, "CmdSetDepthBoundsTestEnableEXT")) return (void *)table->CmdSetDepthBoundsTestEnableEXT; + if (!strcmp(name, "CmdSetStencilTestEnableEXT")) return (void *)table->CmdSetStencilTestEnableEXT; + if (!strcmp(name, "CmdSetStencilOpEXT")) return (void *)table->CmdSetStencilOpEXT; + + // ---- VK_NV_device_generated_commands extension commands + if (!strcmp(name, "GetGeneratedCommandsMemoryRequirementsNV")) return (void *)table->GetGeneratedCommandsMemoryRequirementsNV; + if (!strcmp(name, "CmdPreprocessGeneratedCommandsNV")) return (void *)table->CmdPreprocessGeneratedCommandsNV; + if (!strcmp(name, "CmdExecuteGeneratedCommandsNV")) return (void *)table->CmdExecuteGeneratedCommandsNV; + if (!strcmp(name, "CmdBindPipelineShaderGroupNV")) return (void *)table->CmdBindPipelineShaderGroupNV; + if (!strcmp(name, "CreateIndirectCommandsLayoutNV")) return (void *)table->CreateIndirectCommandsLayoutNV; + if (!strcmp(name, "DestroyIndirectCommandsLayoutNV")) return (void *)table->DestroyIndirectCommandsLayoutNV; + + // ---- VK_EXT_private_data extension commands + if (!strcmp(name, "CreatePrivateDataSlotEXT")) return (void *)table->CreatePrivateDataSlotEXT; + if (!strcmp(name, "DestroyPrivateDataSlotEXT")) return (void *)table->DestroyPrivateDataSlotEXT; + if (!strcmp(name, "SetPrivateDataEXT")) return (void *)table->SetPrivateDataEXT; + if (!strcmp(name, "GetPrivateDataEXT")) return (void *)table->GetPrivateDataEXT; + + // ---- VK_KHR_ray_tracing extension commands +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CreateAccelerationStructureKHR")) return (void *)table->CreateAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "GetAccelerationStructureMemoryRequirementsKHR")) return (void *)table->GetAccelerationStructureMemoryRequirementsKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CmdBuildAccelerationStructureKHR")) return (void *)table->CmdBuildAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CmdBuildAccelerationStructureIndirectKHR")) return (void *)table->CmdBuildAccelerationStructureIndirectKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "BuildAccelerationStructureKHR")) return (void *)table->BuildAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CopyAccelerationStructureKHR")) return (void *)table->CopyAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CopyAccelerationStructureToMemoryKHR")) return (void *)table->CopyAccelerationStructureToMemoryKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CopyMemoryToAccelerationStructureKHR")) return (void *)table->CopyMemoryToAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "WriteAccelerationStructuresPropertiesKHR")) return (void *)table->WriteAccelerationStructuresPropertiesKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CmdCopyAccelerationStructureKHR")) return (void 
*)table->CmdCopyAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CmdCopyAccelerationStructureToMemoryKHR")) return (void *)table->CmdCopyAccelerationStructureToMemoryKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CmdCopyMemoryToAccelerationStructureKHR")) return (void *)table->CmdCopyMemoryToAccelerationStructureKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CmdTraceRaysKHR")) return (void *)table->CmdTraceRaysKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CreateRayTracingPipelinesKHR")) return (void *)table->CreateRayTracingPipelinesKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "GetAccelerationStructureDeviceAddressKHR")) return (void *)table->GetAccelerationStructureDeviceAddressKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "GetRayTracingCaptureReplayShaderGroupHandlesKHR")) return (void *)table->GetRayTracingCaptureReplayShaderGroupHandlesKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "CmdTraceRaysIndirectKHR")) return (void *)table->CmdTraceRaysIndirectKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp(name, "GetDeviceAccelerationStructureCompatibilityKHR")) return (void *)table->GetDeviceAccelerationStructureCompatibilityKHR; +#endif // VK_ENABLE_BETA_EXTENSIONS + return NULL; } @@ -1546,9 +1764,6 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_instance_dispatch_table(const VkLayerI if (!strcmp(name, "CreateViSurfaceNN")) return (void *)table->CreateViSurfaceNN; #endif // VK_USE_PLATFORM_VI_NN - // ---- VK_NVX_device_generated_commands extension commands - if (!strcmp(name, "GetPhysicalDeviceGeneratedCommandsPropertiesNVX")) return (void *)table->GetPhysicalDeviceGeneratedCommandsPropertiesNVX; - // ---- VK_EXT_direct_mode_display extension commands if (!strcmp(name, "ReleaseDisplayEXT")) return (void *)table->ReleaseDisplayEXT; @@ -1611,6 +1826,14 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_instance_dispatch_table(const VkLayerI // ---- VK_EXT_headless_surface extension commands if (!strcmp(name, "CreateHeadlessSurfaceEXT")) return (void *)table->CreateHeadlessSurfaceEXT; + // ---- VK_EXT_directfb_surface extension commands +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + if (!strcmp(name, "CreateDirectFBSurfaceEXT")) return (void *)table->CreateDirectFBSurfaceEXT; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + if (!strcmp(name, "GetPhysicalDeviceDirectFBPresentationSupportEXT")) return (void *)table->GetPhysicalDeviceDirectFBPresentationSupportEXT; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT + *found_name = false; return NULL; } @@ -2102,6 +2325,56 @@ VKAPI_ATTR uint64_t VKAPI_CALL GetDeviceMemoryOpaqueCaptureAddressKHR( } +// ---- VK_KHR_deferred_host_operations extension trampoline/terminators + +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR VkResult VKAPI_CALL CreateDeferredOperationKHR( + VkDevice device, + const VkAllocationCallbacks* pAllocator, + VkDeferredOperationKHR* pDeferredOperation) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->CreateDeferredOperationKHR(device, pAllocator, pDeferredOperation); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR void VKAPI_CALL 
DestroyDeferredOperationKHR( + VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks* pAllocator) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + disp->DestroyDeferredOperationKHR(device, operation, pAllocator); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR uint32_t VKAPI_CALL GetDeferredOperationMaxConcurrencyKHR( + VkDevice device, + VkDeferredOperationKHR operation) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->GetDeferredOperationMaxConcurrencyKHR(device, operation); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR VkResult VKAPI_CALL GetDeferredOperationResultKHR( + VkDevice device, + VkDeferredOperationKHR operation) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->GetDeferredOperationResultKHR(device, operation); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR VkResult VKAPI_CALL DeferredOperationJoinKHR( + VkDevice device, + VkDeferredOperationKHR operation) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->DeferredOperationJoinKHR(device, operation); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS + // ---- VK_KHR_pipeline_executable_properties extension trampoline/terminators VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutablePropertiesKHR( @@ -2132,6 +2405,51 @@ VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableInternalRepresentationsKHR( } +// ---- VK_KHR_copy_commands2 extension trampoline/terminators + +VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2KHR* pCopyBufferInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdCopyBuffer2KHR(commandBuffer, pCopyBufferInfo); +} + +VKAPI_ATTR void VKAPI_CALL CmdCopyImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2KHR* pCopyImageInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdCopyImage2KHR(commandBuffer, pCopyImageInfo); +} + +VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdCopyBufferToImage2KHR(commandBuffer, pCopyBufferToImageInfo); +} + +VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdCopyImageToBuffer2KHR(commandBuffer, pCopyImageToBufferInfo); +} + +VKAPI_ATTR void VKAPI_CALL CmdBlitImage2KHR( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2KHR* pBlitImageInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdBlitImage2KHR(commandBuffer, pBlitImageInfo); +} + +VKAPI_ATTR void VKAPI_CALL CmdResolveImage2KHR( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2KHR* pResolveImageInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdResolveImage2KHR(commandBuffer, pResolveImageInfo); +} + + // ---- VK_EXT_debug_marker extension trampoline/terminators VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT( @@ -2313,6 +2631,14 @@ VKAPI_ATTR uint32_t VKAPI_CALL GetImageViewHandleNVX( return disp->GetImageViewHandleNVX(device, pInfo); } +VKAPI_ATTR VkResult 
VKAPI_CALL GetImageViewAddressNVX( + VkDevice device, + VkImageView imageView, + VkImageViewAddressPropertiesNVX* pProperties) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->GetImageViewAddressNVX(device, imageView, pProperties); +} + // ---- VK_AMD_draw_indirect_count extension trampoline/terminators @@ -2429,100 +2755,6 @@ VKAPI_ATTR void VKAPI_CALL CmdEndConditionalRenderingEXT( } -// ---- VK_NVX_device_generated_commands extension trampoline/terminators - -VKAPI_ATTR void VKAPI_CALL CmdProcessCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) { - const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); - disp->CmdProcessCommandsNVX(commandBuffer, pProcessCommandsInfo); -} - -VKAPI_ATTR void VKAPI_CALL CmdReserveSpaceForCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) { - const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); - disp->CmdReserveSpaceForCommandsNVX(commandBuffer, pReserveSpaceInfo); -} - -VKAPI_ATTR VkResult VKAPI_CALL CreateIndirectCommandsLayoutNVX( - VkDevice device, - const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) { - const VkLayerDispatchTable *disp = loader_get_dispatch(device); - return disp->CreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout); -} - -VKAPI_ATTR void VKAPI_CALL DestroyIndirectCommandsLayoutNVX( - VkDevice device, - VkIndirectCommandsLayoutNVX indirectCommandsLayout, - const VkAllocationCallbacks* pAllocator) { - const VkLayerDispatchTable *disp = loader_get_dispatch(device); - disp->DestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator); -} - -VKAPI_ATTR VkResult VKAPI_CALL CreateObjectTableNVX( - VkDevice device, - const VkObjectTableCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkObjectTableNVX* pObjectTable) { - const VkLayerDispatchTable *disp = loader_get_dispatch(device); - return disp->CreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable); -} - -VKAPI_ATTR void VKAPI_CALL DestroyObjectTableNVX( - VkDevice device, - VkObjectTableNVX objectTable, - const VkAllocationCallbacks* pAllocator) { - const VkLayerDispatchTable *disp = loader_get_dispatch(device); - disp->DestroyObjectTableNVX(device, objectTable, pAllocator); -} - -VKAPI_ATTR VkResult VKAPI_CALL RegisterObjectsNVX( - VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectTableEntryNVX* const* ppObjectTableEntries, - const uint32_t* pObjectIndices) { - const VkLayerDispatchTable *disp = loader_get_dispatch(device); - return disp->RegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices); -} - -VKAPI_ATTR VkResult VKAPI_CALL UnregisterObjectsNVX( - VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectEntryTypeNVX* pObjectEntryTypes, - const uint32_t* pObjectIndices) { - const VkLayerDispatchTable *disp = loader_get_dispatch(device); - return disp->UnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices); -} - -VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceGeneratedCommandsPropertiesNVX( - VkPhysicalDevice physicalDevice, - VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, - VkDeviceGeneratedCommandsLimitsNVX* pLimits) { - const VkLayerInstanceDispatchTable *disp; - 
VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice); - disp = loader_get_instance_layer_dispatch(physicalDevice); - disp->GetPhysicalDeviceGeneratedCommandsPropertiesNVX(unwrapped_phys_dev, pFeatures, pLimits); -} - -VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceGeneratedCommandsPropertiesNVX( - VkPhysicalDevice physicalDevice, - VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, - VkDeviceGeneratedCommandsLimitsNVX* pLimits) { - struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice; - struct loader_icd_term *icd_term = phys_dev_term->this_icd_term; - if (NULL == icd_term->dispatch.GetPhysicalDeviceGeneratedCommandsPropertiesNVX) { - loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, - "ICD associated with VkPhysicalDevice does not support GetPhysicalDeviceGeneratedCommandsPropertiesNVX"); - } - icd_term->dispatch.GetPhysicalDeviceGeneratedCommandsPropertiesNVX(phys_dev_term->phys_dev, pFeatures, pLimits); -} - - // ---- VK_NV_clip_space_w_scaling extension trampoline/terminators VKAPI_ATTR void VKAPI_CALL CmdSetViewportWScalingNV( @@ -2974,9 +3206,17 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateAccelerationStructureNV( return disp->CreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure); } +VKAPI_ATTR void VKAPI_CALL DestroyAccelerationStructureKHR( + VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks* pAllocator) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + disp->DestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator); +} + VKAPI_ATTR void VKAPI_CALL DestroyAccelerationStructureNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, + VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator) { const VkLayerDispatchTable *disp = loader_get_dispatch(device); disp->DestroyAccelerationStructureNV(device, accelerationStructure, pAllocator); @@ -2990,10 +3230,18 @@ VKAPI_ATTR void VKAPI_CALL GetAccelerationStructureMemoryRequirementsNV( disp->GetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements); } +VKAPI_ATTR VkResult VKAPI_CALL BindAccelerationStructureMemoryKHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->BindAccelerationStructureMemoryKHR(device, bindInfoCount, pBindInfos); +} + VKAPI_ATTR VkResult VKAPI_CALL BindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, - const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) { + const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos) { const VkLayerDispatchTable *disp = loader_get_dispatch(device); return disp->BindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos); } @@ -3004,8 +3252,8 @@ VKAPI_ATTR void VKAPI_CALL CmdBuildAccelerationStructureNV( VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, + VkAccelerationStructureKHR dst, + VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset) { const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); @@ -3014,9 +3262,9 @@ VKAPI_ATTR void VKAPI_CALL CmdBuildAccelerationStructureNV( VKAPI_ATTR void VKAPI_CALL CmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, - VkAccelerationStructureNV 
dst, - VkAccelerationStructureNV src, - VkCopyAccelerationStructureModeNV mode) { + VkAccelerationStructureKHR dst, + VkAccelerationStructureKHR src, + VkCopyAccelerationStructureModeKHR mode) { const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); disp->CmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode); } @@ -3052,6 +3300,17 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesNV( return disp->CreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); } +VKAPI_ATTR VkResult VKAPI_CALL GetRayTracingShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->GetRayTracingShaderGroupHandlesKHR(device, pipeline, firstGroup, groupCount, dataSize, pData); +} + VKAPI_ATTR VkResult VKAPI_CALL GetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, @@ -3065,17 +3324,28 @@ VKAPI_ATTR VkResult VKAPI_CALL GetRayTracingShaderGroupHandlesNV( VKAPI_ATTR VkResult VKAPI_CALL GetAccelerationStructureHandleNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, + VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData) { const VkLayerDispatchTable *disp = loader_get_dispatch(device); return disp->GetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData); } +VKAPI_ATTR void VKAPI_CALL CmdWriteAccelerationStructuresPropertiesKHR( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdWriteAccelerationStructuresPropertiesKHR(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery); +} + VKAPI_ATTR void VKAPI_CALL CmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, - const VkAccelerationStructureNV* pAccelerationStructures, + const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) { @@ -3421,6 +3691,395 @@ VKAPI_ATTR void VKAPI_CALL ResetQueryPoolEXT( disp->ResetQueryPoolEXT(device, queryPool, firstQuery, queryCount); } + +// ---- VK_EXT_extended_dynamic_state extension trampoline/terminators + +VKAPI_ATTR void VKAPI_CALL CmdSetCullModeEXT( + VkCommandBuffer commandBuffer, + VkCullModeFlags cullMode) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdSetCullModeEXT(commandBuffer, cullMode); +} + +VKAPI_ATTR void VKAPI_CALL CmdSetFrontFaceEXT( + VkCommandBuffer commandBuffer, + VkFrontFace frontFace) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdSetFrontFaceEXT(commandBuffer, frontFace); +} + +VKAPI_ATTR void VKAPI_CALL CmdSetPrimitiveTopologyEXT( + VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdSetPrimitiveTopologyEXT(commandBuffer, primitiveTopology); +} + +VKAPI_ATTR void VKAPI_CALL CmdSetViewportWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + 
disp->CmdSetViewportWithCountEXT(commandBuffer, viewportCount, pViewports); +} + +VKAPI_ATTR void VKAPI_CALL CmdSetScissorWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdSetScissorWithCountEXT(commandBuffer, scissorCount, pScissors); +} + +VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers2EXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdBindVertexBuffers2EXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides); +} + +VKAPI_ATTR void VKAPI_CALL CmdSetDepthTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthTestEnable) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdSetDepthTestEnableEXT(commandBuffer, depthTestEnable); +} + +VKAPI_ATTR void VKAPI_CALL CmdSetDepthWriteEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdSetDepthWriteEnableEXT(commandBuffer, depthWriteEnable); +} + +VKAPI_ATTR void VKAPI_CALL CmdSetDepthCompareOpEXT( + VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdSetDepthCompareOpEXT(commandBuffer, depthCompareOp); +} + +VKAPI_ATTR void VKAPI_CALL CmdSetDepthBoundsTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdSetDepthBoundsTestEnableEXT(commandBuffer, depthBoundsTestEnable); +} + +VKAPI_ATTR void VKAPI_CALL CmdSetStencilTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdSetStencilTestEnableEXT(commandBuffer, stencilTestEnable); +} + +VKAPI_ATTR void VKAPI_CALL CmdSetStencilOpEXT( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdSetStencilOpEXT(commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp); +} + + +// ---- VK_NV_device_generated_commands extension trampoline/terminators + +VKAPI_ATTR void VKAPI_CALL GetGeneratedCommandsMemoryRequirementsNV( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2* pMemoryRequirements) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + disp->GetGeneratedCommandsMemoryRequirementsNV(device, pInfo, pMemoryRequirements); +} + +VKAPI_ATTR void VKAPI_CALL CmdPreprocessGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdPreprocessGeneratedCommandsNV(commandBuffer, pGeneratedCommandsInfo); +} + +VKAPI_ATTR void VKAPI_CALL CmdExecuteGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + 
disp->CmdExecuteGeneratedCommandsNV(commandBuffer, isPreprocessed, pGeneratedCommandsInfo); +} + +VKAPI_ATTR void VKAPI_CALL CmdBindPipelineShaderGroupNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdBindPipelineShaderGroupNV(commandBuffer, pipelineBindPoint, pipeline, groupIndex); +} + +VKAPI_ATTR VkResult VKAPI_CALL CreateIndirectCommandsLayoutNV( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNV* pIndirectCommandsLayout) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->CreateIndirectCommandsLayoutNV(device, pCreateInfo, pAllocator, pIndirectCommandsLayout); +} + +VKAPI_ATTR void VKAPI_CALL DestroyIndirectCommandsLayoutNV( + VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + disp->DestroyIndirectCommandsLayoutNV(device, indirectCommandsLayout, pAllocator); +} + + +// ---- VK_EXT_private_data extension trampoline/terminators + +VKAPI_ATTR VkResult VKAPI_CALL CreatePrivateDataSlotEXT( + VkDevice device, + const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlotEXT* pPrivateDataSlot) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->CreatePrivateDataSlotEXT(device, pCreateInfo, pAllocator, pPrivateDataSlot); +} + +VKAPI_ATTR void VKAPI_CALL DestroyPrivateDataSlotEXT( + VkDevice device, + VkPrivateDataSlotEXT privateDataSlot, + const VkAllocationCallbacks* pAllocator) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + disp->DestroyPrivateDataSlotEXT(device, privateDataSlot, pAllocator); +} + +VKAPI_ATTR VkResult VKAPI_CALL SetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t data) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->SetPrivateDataEXT(device, objectType, objectHandle, privateDataSlot, data); +} + +VKAPI_ATTR void VKAPI_CALL GetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t* pData) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + disp->GetPrivateDataEXT(device, objectType, objectHandle, privateDataSlot, pData); +} + + +// ---- VK_KHR_ray_tracing extension trampoline/terminators + +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR VkResult VKAPI_CALL CreateAccelerationStructureKHR( + VkDevice device, + const VkAccelerationStructureCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureKHR* pAccelerationStructure) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->CreateAccelerationStructureKHR(device, pCreateInfo, pAllocator, pAccelerationStructure); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR void VKAPI_CALL GetAccelerationStructureMemoryRequirementsKHR( + VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, + VkMemoryRequirements2* pMemoryRequirements) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + disp->GetAccelerationStructureMemoryRequirementsKHR(device, pInfo, 
pMemoryRequirements); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR void VKAPI_CALL CmdBuildAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdBuildAccelerationStructureKHR(commandBuffer, infoCount, pInfos, ppOffsetInfos); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR void VKAPI_CALL CmdBuildAccelerationStructureIndirectKHR( + VkCommandBuffer commandBuffer, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, + VkBuffer indirectBuffer, + VkDeviceSize indirectOffset, + uint32_t indirectStride) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdBuildAccelerationStructureIndirectKHR(commandBuffer, pInfo, indirectBuffer, indirectOffset, indirectStride); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR VkResult VKAPI_CALL BuildAccelerationStructureKHR( + VkDevice device, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->BuildAccelerationStructureKHR(device, infoCount, pInfos, ppOffsetInfos); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR VkResult VKAPI_CALL CopyAccelerationStructureKHR( + VkDevice device, + const VkCopyAccelerationStructureInfoKHR* pInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->CopyAccelerationStructureKHR(device, pInfo); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR VkResult VKAPI_CALL CopyAccelerationStructureToMemoryKHR( + VkDevice device, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->CopyAccelerationStructureToMemoryKHR(device, pInfo); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR VkResult VKAPI_CALL CopyMemoryToAccelerationStructureKHR( + VkDevice device, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->CopyMemoryToAccelerationStructureKHR(device, pInfo); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR VkResult VKAPI_CALL WriteAccelerationStructuresPropertiesKHR( + VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->WriteAccelerationStructuresPropertiesKHR(device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR void VKAPI_CALL CmdCopyAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR* pInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdCopyAccelerationStructureKHR(commandBuffer, pInfo); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef 
VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR void VKAPI_CALL CmdCopyAccelerationStructureToMemoryKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdCopyAccelerationStructureToMemoryKHR(commandBuffer, pInfo); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR void VKAPI_CALL CmdCopyMemoryToAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdCopyMemoryToAccelerationStructureKHR(commandBuffer, pInfo); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR void VKAPI_CALL CmdTraceRaysKHR( + VkCommandBuffer commandBuffer, + const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, + const VkStridedBufferRegionKHR* pMissShaderBindingTable, + const VkStridedBufferRegionKHR* pHitShaderBindingTable, + const VkStridedBufferRegionKHR* pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdTraceRaysKHR(commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesKHR( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->CreateRayTracingPipelinesKHR(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetAccelerationStructureDeviceAddressKHR( + VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR* pInfo) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->GetAccelerationStructureDeviceAddressKHR(device, pInfo); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR VkResult VKAPI_CALL GetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->GetRayTracingCaptureReplayShaderGroupHandlesKHR(device, pipeline, firstGroup, groupCount, dataSize, pData); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS +VKAPI_ATTR void VKAPI_CALL CmdTraceRaysIndirectKHR( + VkCommandBuffer commandBuffer, + const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, + const VkStridedBufferRegionKHR* pMissShaderBindingTable, + const VkStridedBufferRegionKHR* pHitShaderBindingTable, + const VkStridedBufferRegionKHR* pCallableShaderBindingTable, + VkBuffer buffer, + VkDeviceSize offset) { + const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer); + disp->CmdTraceRaysIndirectKHR(commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, buffer, offset); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS 
+VKAPI_ATTR VkResult VKAPI_CALL GetDeviceAccelerationStructureCompatibilityKHR( + VkDevice device, + const VkAccelerationStructureVersionKHR* version) { + const VkLayerDispatchTable *disp = loader_get_dispatch(device); + return disp->GetDeviceAccelerationStructureCompatibilityKHR(device, version); +} + +#endif // VK_ENABLE_BETA_EXTENSIONS // GPA helpers for extensions bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *name, void **addr) { *addr = NULL; @@ -3660,20 +4319,6 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na return true; } - // ---- VK_KHR_get_surface_capabilities2 extension commands - if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilities2KHR", name)) { - *addr = (ptr_instance->enabled_known_extensions.khr_get_surface_capabilities2 == 1) - ? (void *)GetPhysicalDeviceSurfaceCapabilities2KHR - : NULL; - return true; - } - if (!strcmp("vkGetPhysicalDeviceSurfaceFormats2KHR", name)) { - *addr = (ptr_instance->enabled_known_extensions.khr_get_surface_capabilities2 == 1) - ? (void *)GetPhysicalDeviceSurfaceFormats2KHR - : NULL; - return true; - } - // ---- VK_KHR_get_memory_requirements2 extension commands if (!strcmp("vkGetImageMemoryRequirements2KHR", name)) { *addr = (void *)GetImageMemoryRequirements2KHR; @@ -3752,6 +4397,38 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na return true; } + // ---- VK_KHR_deferred_host_operations extension commands +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkCreateDeferredOperationKHR", name)) { + *addr = (void *)CreateDeferredOperationKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkDestroyDeferredOperationKHR", name)) { + *addr = (void *)DestroyDeferredOperationKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkGetDeferredOperationMaxConcurrencyKHR", name)) { + *addr = (void *)GetDeferredOperationMaxConcurrencyKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkGetDeferredOperationResultKHR", name)) { + *addr = (void *)GetDeferredOperationResultKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkDeferredOperationJoinKHR", name)) { + *addr = (void *)DeferredOperationJoinKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS + // ---- VK_KHR_pipeline_executable_properties extension commands if (!strcmp("vkGetPipelineExecutablePropertiesKHR", name)) { *addr = (void *)GetPipelineExecutablePropertiesKHR; @@ -3766,6 +4443,32 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na return true; } + // ---- VK_KHR_copy_commands2 extension commands + if (!strcmp("vkCmdCopyBuffer2KHR", name)) { + *addr = (void *)CmdCopyBuffer2KHR; + return true; + } + if (!strcmp("vkCmdCopyImage2KHR", name)) { + *addr = (void *)CmdCopyImage2KHR; + return true; + } + if (!strcmp("vkCmdCopyBufferToImage2KHR", name)) { + *addr = (void *)CmdCopyBufferToImage2KHR; + return true; + } + if (!strcmp("vkCmdCopyImageToBuffer2KHR", name)) { + *addr = (void *)CmdCopyImageToBuffer2KHR; + return true; + } + if (!strcmp("vkCmdBlitImage2KHR", name)) { + *addr = (void *)CmdBlitImage2KHR; + return true; + } + if (!strcmp("vkCmdResolveImage2KHR", name)) { + *addr = (void *)CmdResolveImage2KHR; + return true; + } + // ---- VK_EXT_debug_marker extension commands if (!strcmp("vkDebugMarkerSetObjectTagEXT", name)) { *addr 
= (void *)DebugMarkerSetObjectTagEXT; @@ -3819,6 +4522,10 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na *addr = (void *)GetImageViewHandleNVX; return true; } + if (!strcmp("vkGetImageViewAddressNVX", name)) { + *addr = (void *)GetImageViewAddressNVX; + return true; + } // ---- VK_AMD_draw_indirect_count extension commands if (!strcmp("vkCmdDrawIndirectCountAMD", name)) { @@ -3882,44 +4589,6 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na return true; } - // ---- VK_NVX_device_generated_commands extension commands - if (!strcmp("vkCmdProcessCommandsNVX", name)) { - *addr = (void *)CmdProcessCommandsNVX; - return true; - } - if (!strcmp("vkCmdReserveSpaceForCommandsNVX", name)) { - *addr = (void *)CmdReserveSpaceForCommandsNVX; - return true; - } - if (!strcmp("vkCreateIndirectCommandsLayoutNVX", name)) { - *addr = (void *)CreateIndirectCommandsLayoutNVX; - return true; - } - if (!strcmp("vkDestroyIndirectCommandsLayoutNVX", name)) { - *addr = (void *)DestroyIndirectCommandsLayoutNVX; - return true; - } - if (!strcmp("vkCreateObjectTableNVX", name)) { - *addr = (void *)CreateObjectTableNVX; - return true; - } - if (!strcmp("vkDestroyObjectTableNVX", name)) { - *addr = (void *)DestroyObjectTableNVX; - return true; - } - if (!strcmp("vkRegisterObjectsNVX", name)) { - *addr = (void *)RegisterObjectsNVX; - return true; - } - if (!strcmp("vkUnregisterObjectsNVX", name)) { - *addr = (void *)UnregisterObjectsNVX; - return true; - } - if (!strcmp("vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX", name)) { - *addr = (void *)GetPhysicalDeviceGeneratedCommandsPropertiesNVX; - return true; - } - // ---- VK_NV_clip_space_w_scaling extension commands if (!strcmp("vkCmdSetViewportWScalingNV", name)) { *addr = (void *)CmdSetViewportWScalingNV; @@ -4117,6 +4786,10 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na *addr = (void *)CreateAccelerationStructureNV; return true; } + if (!strcmp("vkDestroyAccelerationStructureKHR", name)) { + *addr = (void *)DestroyAccelerationStructureKHR; + return true; + } if (!strcmp("vkDestroyAccelerationStructureNV", name)) { *addr = (void *)DestroyAccelerationStructureNV; return true; @@ -4125,6 +4798,10 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na *addr = (void *)GetAccelerationStructureMemoryRequirementsNV; return true; } + if (!strcmp("vkBindAccelerationStructureMemoryKHR", name)) { + *addr = (void *)BindAccelerationStructureMemoryKHR; + return true; + } if (!strcmp("vkBindAccelerationStructureMemoryNV", name)) { *addr = (void *)BindAccelerationStructureMemoryNV; return true; @@ -4145,6 +4822,10 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na *addr = (void *)CreateRayTracingPipelinesNV; return true; } + if (!strcmp("vkGetRayTracingShaderGroupHandlesKHR", name)) { + *addr = (void *)GetRayTracingShaderGroupHandlesKHR; + return true; + } if (!strcmp("vkGetRayTracingShaderGroupHandlesNV", name)) { *addr = (void *)GetRayTracingShaderGroupHandlesNV; return true; @@ -4153,6 +4834,10 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na *addr = (void *)GetAccelerationStructureHandleNV; return true; } + if (!strcmp("vkCmdWriteAccelerationStructuresPropertiesKHR", name)) { + *addr = (void *)CmdWriteAccelerationStructuresPropertiesKHR; + return true; + } if (!strcmp("vkCmdWriteAccelerationStructuresPropertiesNV", name)) { *addr = (void 
*)CmdWriteAccelerationStructuresPropertiesNV; return true; @@ -4329,6 +5014,210 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na *addr = (void *)ResetQueryPoolEXT; return true; } + + // ---- VK_EXT_extended_dynamic_state extension commands + if (!strcmp("vkCmdSetCullModeEXT", name)) { + *addr = (void *)CmdSetCullModeEXT; + return true; + } + if (!strcmp("vkCmdSetFrontFaceEXT", name)) { + *addr = (void *)CmdSetFrontFaceEXT; + return true; + } + if (!strcmp("vkCmdSetPrimitiveTopologyEXT", name)) { + *addr = (void *)CmdSetPrimitiveTopologyEXT; + return true; + } + if (!strcmp("vkCmdSetViewportWithCountEXT", name)) { + *addr = (void *)CmdSetViewportWithCountEXT; + return true; + } + if (!strcmp("vkCmdSetScissorWithCountEXT", name)) { + *addr = (void *)CmdSetScissorWithCountEXT; + return true; + } + if (!strcmp("vkCmdBindVertexBuffers2EXT", name)) { + *addr = (void *)CmdBindVertexBuffers2EXT; + return true; + } + if (!strcmp("vkCmdSetDepthTestEnableEXT", name)) { + *addr = (void *)CmdSetDepthTestEnableEXT; + return true; + } + if (!strcmp("vkCmdSetDepthWriteEnableEXT", name)) { + *addr = (void *)CmdSetDepthWriteEnableEXT; + return true; + } + if (!strcmp("vkCmdSetDepthCompareOpEXT", name)) { + *addr = (void *)CmdSetDepthCompareOpEXT; + return true; + } + if (!strcmp("vkCmdSetDepthBoundsTestEnableEXT", name)) { + *addr = (void *)CmdSetDepthBoundsTestEnableEXT; + return true; + } + if (!strcmp("vkCmdSetStencilTestEnableEXT", name)) { + *addr = (void *)CmdSetStencilTestEnableEXT; + return true; + } + if (!strcmp("vkCmdSetStencilOpEXT", name)) { + *addr = (void *)CmdSetStencilOpEXT; + return true; + } + + // ---- VK_NV_device_generated_commands extension commands + if (!strcmp("vkGetGeneratedCommandsMemoryRequirementsNV", name)) { + *addr = (void *)GetGeneratedCommandsMemoryRequirementsNV; + return true; + } + if (!strcmp("vkCmdPreprocessGeneratedCommandsNV", name)) { + *addr = (void *)CmdPreprocessGeneratedCommandsNV; + return true; + } + if (!strcmp("vkCmdExecuteGeneratedCommandsNV", name)) { + *addr = (void *)CmdExecuteGeneratedCommandsNV; + return true; + } + if (!strcmp("vkCmdBindPipelineShaderGroupNV", name)) { + *addr = (void *)CmdBindPipelineShaderGroupNV; + return true; + } + if (!strcmp("vkCreateIndirectCommandsLayoutNV", name)) { + *addr = (void *)CreateIndirectCommandsLayoutNV; + return true; + } + if (!strcmp("vkDestroyIndirectCommandsLayoutNV", name)) { + *addr = (void *)DestroyIndirectCommandsLayoutNV; + return true; + } + + // ---- VK_EXT_private_data extension commands + if (!strcmp("vkCreatePrivateDataSlotEXT", name)) { + *addr = (void *)CreatePrivateDataSlotEXT; + return true; + } + if (!strcmp("vkDestroyPrivateDataSlotEXT", name)) { + *addr = (void *)DestroyPrivateDataSlotEXT; + return true; + } + if (!strcmp("vkSetPrivateDataEXT", name)) { + *addr = (void *)SetPrivateDataEXT; + return true; + } + if (!strcmp("vkGetPrivateDataEXT", name)) { + *addr = (void *)GetPrivateDataEXT; + return true; + } + + // ---- VK_KHR_ray_tracing extension commands +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkCreateAccelerationStructureKHR", name)) { + *addr = (void *)CreateAccelerationStructureKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkGetAccelerationStructureMemoryRequirementsKHR", name)) { + *addr = (void *)GetAccelerationStructureMemoryRequirementsKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if 
(!strcmp("vkCmdBuildAccelerationStructureKHR", name)) { + *addr = (void *)CmdBuildAccelerationStructureKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkCmdBuildAccelerationStructureIndirectKHR", name)) { + *addr = (void *)CmdBuildAccelerationStructureIndirectKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkBuildAccelerationStructureKHR", name)) { + *addr = (void *)BuildAccelerationStructureKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkCopyAccelerationStructureKHR", name)) { + *addr = (void *)CopyAccelerationStructureKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkCopyAccelerationStructureToMemoryKHR", name)) { + *addr = (void *)CopyAccelerationStructureToMemoryKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkCopyMemoryToAccelerationStructureKHR", name)) { + *addr = (void *)CopyMemoryToAccelerationStructureKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkWriteAccelerationStructuresPropertiesKHR", name)) { + *addr = (void *)WriteAccelerationStructuresPropertiesKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkCmdCopyAccelerationStructureKHR", name)) { + *addr = (void *)CmdCopyAccelerationStructureKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkCmdCopyAccelerationStructureToMemoryKHR", name)) { + *addr = (void *)CmdCopyAccelerationStructureToMemoryKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkCmdCopyMemoryToAccelerationStructureKHR", name)) { + *addr = (void *)CmdCopyMemoryToAccelerationStructureKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkCmdTraceRaysKHR", name)) { + *addr = (void *)CmdTraceRaysKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkCreateRayTracingPipelinesKHR", name)) { + *addr = (void *)CreateRayTracingPipelinesKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkGetAccelerationStructureDeviceAddressKHR", name)) { + *addr = (void *)GetAccelerationStructureDeviceAddressKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkGetRayTracingCaptureReplayShaderGroupHandlesKHR", name)) { + *addr = (void *)GetRayTracingCaptureReplayShaderGroupHandlesKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkCmdTraceRaysIndirectKHR", name)) { + *addr = (void *)CmdTraceRaysIndirectKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (!strcmp("vkGetDeviceAccelerationStructureCompatibilityKHR", name)) { + *addr = (void *)GetDeviceAccelerationStructureCompatibilityKHR; + return true; + } +#endif // VK_ENABLE_BETA_EXTENSIONS return false; } @@ -4356,10 +5245,6 @@ void extensions_create_instance(struct loader_instance *ptr_instance, const VkIn } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME)) { 
ptr_instance->enabled_known_extensions.khr_external_fence_capabilities = 1; - // ---- VK_KHR_get_surface_capabilities2 extension commands - } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME)) { - ptr_instance->enabled_known_extensions.khr_get_surface_capabilities2 = 1; - // ---- VK_GGP_stream_descriptor_surface extension commands #ifdef VK_USE_PLATFORM_GGP } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME)) { @@ -4462,7 +5347,7 @@ PFN_vkVoidFunction get_extension_device_proc_terminator(struct loader_device *de addr = (PFN_vkVoidFunction)terminator_GetDeviceGroupSurfacePresentModes2EXT; } } -#endif // None +#endif // VK_ENABLE_BETA_EXTENSIONS return addr; } @@ -4607,9 +5492,6 @@ const VkLayerInstanceDispatchTable instance_disp = { .CreateViSurfaceNN = terminator_CreateViSurfaceNN, #endif // VK_USE_PLATFORM_VI_NN - // ---- VK_NVX_device_generated_commands extension commands - .GetPhysicalDeviceGeneratedCommandsPropertiesNVX = terminator_GetPhysicalDeviceGeneratedCommandsPropertiesNVX, - // ---- VK_EXT_direct_mode_display extension commands .ReleaseDisplayEXT = terminator_ReleaseDisplayEXT, @@ -4671,6 +5553,14 @@ const VkLayerInstanceDispatchTable instance_disp = { // ---- VK_EXT_headless_surface extension commands .CreateHeadlessSurfaceEXT = terminator_CreateHeadlessSurfaceEXT, + + // ---- VK_EXT_directfb_surface extension commands +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + .CreateDirectFBSurfaceEXT = terminator_CreateDirectFBSurfaceEXT, +#endif // VK_USE_PLATFORM_DIRECTFB_EXT +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + .GetPhysicalDeviceDirectFBPresentationSupportEXT = terminator_GetPhysicalDeviceDirectFBPresentationSupportEXT, +#endif // VK_USE_PLATFORM_DIRECTFB_EXT }; // A null-terminated list of all of the instance extensions supported by the loader. 
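The generated entrypoints above all follow the same two mechanisms: a trampoline that fetches the dispatch table stored with the object and forwards the call unchanged, and a strcmp-based GetProcAddr helper that maps an entrypoint name to that trampoline. The following is a hedged, self-contained sketch of both ideas using hypothetical Example* names and a made-up "vkExampleDoThing" entrypoint; it is an illustration of the pattern, not the loader's real code or types.

    #include <stdio.h>
    #include <string.h>

    /* Schematic stand-ins for the loader's per-object dispatch machinery. */
    typedef struct ExampleDispatchTable {
        void (*DoThing)(void *device, int value);
    } ExampleDispatchTable;

    typedef struct ExampleDevice {
        const ExampleDispatchTable *dispatch;  /* filled in at creation time */
    } ExampleDevice;

    /* Pretend driver implementation sitting at the end of the chain. */
    static void driver_do_thing(void *device, int value) {
        (void)device;
        printf("driver received %d\n", value);
    }

    /* Trampoline: a table lookup followed by an unchanged forward, mirroring
     * "disp = loader_get_dispatch(device); disp->Func(...);" above. */
    static void ExampleDoThing(ExampleDevice *device, int value) {
        device->dispatch->DoThing(device, value);
    }

    /* GPA helper in the style of extension_instance_gpa(): a recognized name
     * yields the trampoline's address, an unknown name yields NULL. */
    static int example_gpa(const char *name, void **addr) {
        *addr = NULL;
        if (!strcmp("vkExampleDoThing", name)) {
            *addr = (void *)ExampleDoThing;
            return 1;
        }
        return 0;
    }

    int main(void) {
        ExampleDispatchTable table = { driver_do_thing };
        ExampleDevice device = { &table };
        void *fp = NULL;
        if (example_gpa("vkExampleDoThing", &fp) && fp != NULL) {
            ((void (*)(ExampleDevice *, int))fp)(&device, 42);
        }
        return 0;
    }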
@@ -4730,5 +5620,8 @@ const char *const LOADER_INSTANCE_EXTENSIONS[] = { #endif // VK_USE_PLATFORM_METAL_EXT VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME, VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME, +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME, +#endif // VK_USE_PLATFORM_DIRECTFB_EXT NULL }; diff --git a/thirdparty/vulkan/loader/vk_loader_extensions.h b/thirdparty/vulkan/loader/vk_loader_extensions.h index a50f3cdd326..9835e16a070 100644 --- a/thirdparty/vulkan/loader/vk_loader_extensions.h +++ b/thirdparty/vulkan/loader/vk_loader_extensions.h @@ -356,9 +356,6 @@ struct loader_icd_term_dispatch { PFN_vkCreateViSurfaceNN CreateViSurfaceNN; #endif // VK_USE_PLATFORM_VI_NN - // ---- VK_NVX_device_generated_commands extension commands - PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX GetPhysicalDeviceGeneratedCommandsPropertiesNVX; - // ---- VK_EXT_direct_mode_display extension commands PFN_vkReleaseDisplayEXT ReleaseDisplayEXT; @@ -431,6 +428,14 @@ struct loader_icd_term_dispatch { // ---- VK_EXT_headless_surface extension commands PFN_vkCreateHeadlessSurfaceEXT CreateHeadlessSurfaceEXT; + + // ---- VK_EXT_directfb_surface extension commands +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + PFN_vkCreateDirectFBSurfaceEXT CreateDirectFBSurfaceEXT; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT GetPhysicalDeviceDirectFBPresentationSupportEXT; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT }; union loader_instance_extension_enables { @@ -440,7 +445,6 @@ union loader_instance_extension_enables { uint8_t khr_external_memory_capabilities : 1; uint8_t khr_external_semaphore_capabilities : 1; uint8_t khr_external_fence_capabilities : 1; - uint8_t khr_get_surface_capabilities2 : 1; uint8_t ext_debug_report : 1; uint8_t ggp_stream_descriptor_surface : 1; uint8_t nv_external_memory_capabilities : 1; diff --git a/thirdparty/vulkan/loader/vk_loader_platform.h b/thirdparty/vulkan/loader/vk_loader_platform.h index 62e8e3ae095..e227bf07b2d 100644 --- a/thirdparty/vulkan/loader/vk_loader_platform.h +++ b/thirdparty/vulkan/loader/vk_loader_platform.h @@ -38,7 +38,6 @@ //#ifndef _GNU_SOURCE //#define _GNU_SOURCE 1 //#endif -// TBD: Are the contents of the following file used? #include // Note: The following file is for dynamic loading: #include @@ -100,13 +99,43 @@ static inline bool loader_platform_is_path_absolute(const char *path) { static inline char *loader_platform_dirname(char *path) { return dirname(path); } +#if defined(__linux__) + +// find application path + name. Path cannot be longer than 1024, returns NULL if it is greater than that. 
+static inline char *loader_platform_executable_path(char *buffer, size_t size) { + ssize_t count = readlink("/proc/self/exe", buffer, size); + if (count == -1) return NULL; + if (count == 0) return NULL; + buffer[count] = '\0'; + return buffer; +} +#elif defined(__APPLE__) // defined(__linux__) +#include +static inline char *loader_platform_executable_path(char *buffer, size_t size) { + pid_t pid = getpid(); + int ret = proc_pidpath(pid, buffer, size); + if (ret <= 0) return NULL; + buffer[ret] = '\0'; + return buffer; +} +#endif // defined (__APPLE__) + +// Compatability with compilers that don't support __has_feature +#ifndef __has_feature +#define __has_feature(x) 0 +#endif + +#if __has_feature(address_sanitizer) || defined(__SANITIZE_ADDRESS__) +#define LOADER_ADDRESS_SANITIZER +#endif + // Dynamic Loading of libraries: typedef void *loader_platform_dl_handle; static inline loader_platform_dl_handle loader_platform_open_library(const char *libPath) { - // When loading the library, we use RTLD_LAZY so that not all symbols have to be - // resolved at this time (which improves performance). Note that if not all symbols - // can be resolved, this could cause crashes later. Use the LD_BIND_NOW environment - // variable to force all symbols to be resolved here. +// When loading the library, we use RTLD_LAZY so that not all symbols have to be +// resolved at this time (which improves performance). Note that if not all symbols +// can be resolved, this could cause crashes later. Use the LD_BIND_NOW environment +// variable to force all symbols to be resolved here. return dlopen(libPath, RTLD_LAZY | RTLD_LOCAL); } static inline const char *loader_platform_open_library_error(const char *libPath) { return dlerror(); } @@ -162,6 +191,7 @@ static inline void loader_platform_thread_cond_broadcast(loader_platform_thread_ #include #include #include +#include #ifdef __cplusplus #include #include @@ -279,6 +309,14 @@ static inline char *loader_platform_dirname(char *path) { return path; } +static inline char *loader_platform_executable_path(char *buffer, size_t size) { + DWORD ret = GetModuleFileName(NULL, buffer, (DWORD)size); + if (ret == 0) return NULL; + if (ret > size) return NULL; + buffer[ret] = '\0'; + return buffer; +} + // Dynamic Loading: typedef HMODULE loader_platform_dl_handle; static loader_platform_dl_handle loader_platform_open_library(const char *lib_path) { @@ -330,25 +368,9 @@ typedef HANDLE loader_platform_thread; // The once init functionality is not used when building a DLL on Windows. This is because there is no way to clean up the // resources allocated by anything allocated by once init. This isn't a problem for static libraries, but it is for dynamic // ones. When building a DLL, we use DllMain() instead to allow properly cleaning up resources. 
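The vk_loader_platform.h hunk above introduces loader_platform_executable_path(), built on readlink("/proc/self/exe") for Linux and proc_pidpath() for macOS. Below is a minimal standalone sketch of the Linux variant only (an illustration under those assumptions, not the loader's code); note that readlink() does not append a terminating NUL, so the sketch reserves one byte of the buffer for it.

    #include <stdio.h>
    #include <unistd.h>

    int main(void) {
        char path[1024];
        /* readlink() returns the number of bytes written and does not
         * NUL-terminate, so pass sizeof(path) - 1 and terminate manually. */
        ssize_t count = readlink("/proc/self/exe", path, sizeof(path) - 1);
        if (count <= 0) {
            return 1;  /* link could not be resolved */
        }
        path[count] = '\0';
        printf("executable: %s\n", path);
        return 0;
    }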
-#if defined(LOADER_DYNAMIC_LIB) #define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var) #define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var) #define LOADER_PLATFORM_THREAD_ONCE(ctl, func) -#else -#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var) INIT_ONCE var = INIT_ONCE_STATIC_INIT; -#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var) INIT_ONCE var; -#define LOADER_PLATFORM_THREAD_ONCE(ctl, func) loader_platform_thread_once_fn(ctl, func) -static BOOL CALLBACK InitFuncWrapper(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *Context) { - void (*func)(void) = (void (*)(void))Parameter; - func(); - return TRUE; -} -static void loader_platform_thread_once_fn(void *ctl, void (*func)(void)) { - assert(func != NULL); - assert(ctl != NULL); - InitOnceExecuteOnce((PINIT_ONCE)ctl, InitFuncWrapper, (void *)func, NULL); -} -#endif // Thread IDs: typedef DWORD loader_platform_thread_id; diff --git a/thirdparty/vulkan/loader/vk_object_types.h b/thirdparty/vulkan/loader/vk_object_types.h index 7fdf77aa6a2..16d22c8241b 100644 --- a/thirdparty/vulkan/loader/vk_object_types.h +++ b/thirdparty/vulkan/loader/vk_object_types.h @@ -37,16 +37,16 @@ // Object Type enum for validation layer internal object handling typedef enum VulkanObjectType { kVulkanObjectTypeUnknown = 0, - kVulkanObjectTypeInstance = 1, - kVulkanObjectTypePhysicalDevice = 2, - kVulkanObjectTypeDevice = 3, - kVulkanObjectTypeQueue = 4, - kVulkanObjectTypeSemaphore = 5, - kVulkanObjectTypeCommandBuffer = 6, - kVulkanObjectTypeFence = 7, - kVulkanObjectTypeDeviceMemory = 8, - kVulkanObjectTypeBuffer = 9, - kVulkanObjectTypeImage = 10, + kVulkanObjectTypeBuffer = 1, + kVulkanObjectTypeImage = 2, + kVulkanObjectTypeInstance = 3, + kVulkanObjectTypePhysicalDevice = 4, + kVulkanObjectTypeDevice = 5, + kVulkanObjectTypeQueue = 6, + kVulkanObjectTypeSemaphore = 7, + kVulkanObjectTypeCommandBuffer = 8, + kVulkanObjectTypeFence = 9, + kVulkanObjectTypeDeviceMemory = 10, kVulkanObjectTypeEvent = 11, kVulkanObjectTypeQueryPool = 12, kVulkanObjectTypeBufferView = 13, @@ -54,12 +54,12 @@ typedef enum VulkanObjectType { kVulkanObjectTypeShaderModule = 15, kVulkanObjectTypePipelineCache = 16, kVulkanObjectTypePipelineLayout = 17, - kVulkanObjectTypeRenderPass = 18, - kVulkanObjectTypePipeline = 19, + kVulkanObjectTypePipeline = 18, + kVulkanObjectTypeRenderPass = 19, kVulkanObjectTypeDescriptorSetLayout = 20, kVulkanObjectTypeSampler = 21, - kVulkanObjectTypeDescriptorPool = 22, - kVulkanObjectTypeDescriptorSet = 23, + kVulkanObjectTypeDescriptorSet = 22, + kVulkanObjectTypeDescriptorPool = 23, kVulkanObjectTypeFramebuffer = 24, kVulkanObjectTypeCommandPool = 25, kVulkanObjectTypeSamplerYcbcrConversion = 26, @@ -68,22 +68,26 @@ typedef enum VulkanObjectType { kVulkanObjectTypeSwapchainKHR = 29, kVulkanObjectTypeDisplayKHR = 30, kVulkanObjectTypeDisplayModeKHR = 31, - kVulkanObjectTypeDebugReportCallbackEXT = 32, - kVulkanObjectTypeObjectTableNVX = 33, - kVulkanObjectTypeIndirectCommandsLayoutNVX = 34, - kVulkanObjectTypeDebugUtilsMessengerEXT = 35, - kVulkanObjectTypeValidationCacheEXT = 36, - kVulkanObjectTypeAccelerationStructureNV = 37, - kVulkanObjectTypePerformanceConfigurationINTEL = 38, - kVulkanObjectTypeMax = 39, + kVulkanObjectTypeDeferredOperationKHR = 32, + kVulkanObjectTypeDebugReportCallbackEXT = 33, + kVulkanObjectTypeDebugUtilsMessengerEXT = 34, + kVulkanObjectTypeValidationCacheEXT = 35, + kVulkanObjectTypeAccelerationStructureKHR = 36, + kVulkanObjectTypePerformanceConfigurationINTEL = 37, + kVulkanObjectTypeIndirectCommandsLayoutNV = 38, + 
kVulkanObjectTypePrivateDataSlotEXT = 39, + kVulkanObjectTypeMax = 40, // Aliases for backwards compatibilty of "promoted" types kVulkanObjectTypeDescriptorUpdateTemplateKHR = kVulkanObjectTypeDescriptorUpdateTemplate, kVulkanObjectTypeSamplerYcbcrConversionKHR = kVulkanObjectTypeSamplerYcbcrConversion, + kVulkanObjectTypeAccelerationStructureNV = kVulkanObjectTypeAccelerationStructureKHR, } VulkanObjectType; // Array of object name strings for OBJECT_TYPE enum conversion static const char * const object_string[kVulkanObjectTypeMax] = { "Unknown", + "Buffer", + "Image", "Instance", "PhysicalDevice", "Device", @@ -92,8 +96,6 @@ static const char * const object_string[kVulkanObjectTypeMax] = { "CommandBuffer", "Fence", "DeviceMemory", - "Buffer", - "Image", "Event", "QueryPool", "BufferView", @@ -101,12 +103,12 @@ static const char * const object_string[kVulkanObjectTypeMax] = { "ShaderModule", "PipelineCache", "PipelineLayout", - "RenderPass", "Pipeline", + "RenderPass", "DescriptorSetLayout", "Sampler", - "DescriptorPool", "DescriptorSet", + "DescriptorPool", "Framebuffer", "CommandPool", "SamplerYcbcrConversion", @@ -115,18 +117,21 @@ static const char * const object_string[kVulkanObjectTypeMax] = { "SwapchainKHR", "DisplayKHR", "DisplayModeKHR", + "DeferredOperationKHR", "DebugReportCallbackEXT", - "ObjectTableNVX", - "IndirectCommandsLayoutNVX", "DebugUtilsMessengerEXT", "ValidationCacheEXT", - "AccelerationStructureNV", + "AccelerationStructureKHR", "PerformanceConfigurationINTEL", + "IndirectCommandsLayoutNV", + "PrivateDataSlotEXT", }; // Helper array to get Vulkan VK_EXT_debug_report object type enum from the internal layers version const VkDebugReportObjectTypeEXT get_debug_report_enum[] = { VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeUnknown + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, // kVulkanObjectTypeBuffer + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, // kVulkanObjectTypeImage VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, // kVulkanObjectTypeInstance VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, // kVulkanObjectTypePhysicalDevice VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, // kVulkanObjectTypeDevice @@ -135,8 +140,6 @@ const VkDebugReportObjectTypeEXT get_debug_report_enum[] = { VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, // kVulkanObjectTypeCommandBuffer VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, // kVulkanObjectTypeFence VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, // kVulkanObjectTypeDeviceMemory - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, // kVulkanObjectTypeBuffer - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, // kVulkanObjectTypeImage VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, // kVulkanObjectTypeEvent VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, // kVulkanObjectTypeQueryPool VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, // kVulkanObjectTypeBufferView @@ -144,12 +147,12 @@ const VkDebugReportObjectTypeEXT get_debug_report_enum[] = { VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, // kVulkanObjectTypeShaderModule VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, // kVulkanObjectTypePipelineCache VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, // kVulkanObjectTypePipelineLayout - VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, // kVulkanObjectTypeRenderPass VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, // kVulkanObjectTypePipeline + VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, // kVulkanObjectTypeRenderPass VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, // kVulkanObjectTypeDescriptorSetLayout VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, // kVulkanObjectTypeSampler - 
VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, // kVulkanObjectTypeDescriptorPool VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, // kVulkanObjectTypeDescriptorSet + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, // kVulkanObjectTypeDescriptorPool VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, // kVulkanObjectTypeFramebuffer VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, // kVulkanObjectTypeCommandPool VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, // kVulkanObjectTypeSamplerYcbcrConversion @@ -158,18 +161,21 @@ const VkDebugReportObjectTypeEXT get_debug_report_enum[] = { VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, // kVulkanObjectTypeSwapchainKHR VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, // kVulkanObjectTypeDisplayKHR VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, // kVulkanObjectTypeDisplayModeKHR + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeDeferredOperationKHR VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, // kVulkanObjectTypeDebugReportCallbackEXT - VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT, // kVulkanObjectTypeObjectTableNVX - VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, // kVulkanObjectTypeIndirectCommandsLayoutNVX VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeDebugUtilsMessengerEXT VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, // kVulkanObjectTypeValidationCacheEXT - VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, // kVulkanObjectTypeAccelerationStructureNV + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, // kVulkanObjectTypeAccelerationStructureKHR VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypePerformanceConfigurationINTEL + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeIndirectCommandsLayoutNV + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypePrivateDataSlotEXT }; // Helper array to get Official Vulkan VkObjectType enum from the internal layers version const VkObjectType get_object_type_enum[] = { VK_OBJECT_TYPE_UNKNOWN, // kVulkanObjectTypeUnknown + VK_OBJECT_TYPE_BUFFER, // kVulkanObjectTypeBuffer + VK_OBJECT_TYPE_IMAGE, // kVulkanObjectTypeImage VK_OBJECT_TYPE_INSTANCE, // kVulkanObjectTypeInstance VK_OBJECT_TYPE_PHYSICAL_DEVICE, // kVulkanObjectTypePhysicalDevice VK_OBJECT_TYPE_DEVICE, // kVulkanObjectTypeDevice @@ -178,8 +184,6 @@ const VkObjectType get_object_type_enum[] = { VK_OBJECT_TYPE_COMMAND_BUFFER, // kVulkanObjectTypeCommandBuffer VK_OBJECT_TYPE_FENCE, // kVulkanObjectTypeFence VK_OBJECT_TYPE_DEVICE_MEMORY, // kVulkanObjectTypeDeviceMemory - VK_OBJECT_TYPE_BUFFER, // kVulkanObjectTypeBuffer - VK_OBJECT_TYPE_IMAGE, // kVulkanObjectTypeImage VK_OBJECT_TYPE_EVENT, // kVulkanObjectTypeEvent VK_OBJECT_TYPE_QUERY_POOL, // kVulkanObjectTypeQueryPool VK_OBJECT_TYPE_BUFFER_VIEW, // kVulkanObjectTypeBufferView @@ -187,12 +191,12 @@ const VkObjectType get_object_type_enum[] = { VK_OBJECT_TYPE_SHADER_MODULE, // kVulkanObjectTypeShaderModule VK_OBJECT_TYPE_PIPELINE_CACHE, // kVulkanObjectTypePipelineCache VK_OBJECT_TYPE_PIPELINE_LAYOUT, // kVulkanObjectTypePipelineLayout - VK_OBJECT_TYPE_RENDER_PASS, // kVulkanObjectTypeRenderPass VK_OBJECT_TYPE_PIPELINE, // kVulkanObjectTypePipeline + VK_OBJECT_TYPE_RENDER_PASS, // kVulkanObjectTypeRenderPass VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, // kVulkanObjectTypeDescriptorSetLayout VK_OBJECT_TYPE_SAMPLER, // kVulkanObjectTypeSampler - VK_OBJECT_TYPE_DESCRIPTOR_POOL, // kVulkanObjectTypeDescriptorPool VK_OBJECT_TYPE_DESCRIPTOR_SET, // kVulkanObjectTypeDescriptorSet + 
VK_OBJECT_TYPE_DESCRIPTOR_POOL, // kVulkanObjectTypeDescriptorPool VK_OBJECT_TYPE_FRAMEBUFFER, // kVulkanObjectTypeFramebuffer VK_OBJECT_TYPE_COMMAND_POOL, // kVulkanObjectTypeCommandPool VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, // kVulkanObjectTypeSamplerYcbcrConversion @@ -201,13 +205,14 @@ const VkObjectType get_object_type_enum[] = { VK_OBJECT_TYPE_SWAPCHAIN_KHR, // kVulkanObjectTypeSwapchainKHR VK_OBJECT_TYPE_DISPLAY_KHR, // kVulkanObjectTypeDisplayKHR VK_OBJECT_TYPE_DISPLAY_MODE_KHR, // kVulkanObjectTypeDisplayModeKHR + VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR, // kVulkanObjectTypeDeferredOperationKHR VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, // kVulkanObjectTypeDebugReportCallbackEXT - VK_OBJECT_TYPE_OBJECT_TABLE_NVX, // kVulkanObjectTypeObjectTableNVX - VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX, // kVulkanObjectTypeIndirectCommandsLayoutNVX VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, // kVulkanObjectTypeDebugUtilsMessengerEXT VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, // kVulkanObjectTypeValidationCacheEXT - VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, // kVulkanObjectTypeAccelerationStructureNV + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, // kVulkanObjectTypeAccelerationStructureKHR VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, // kVulkanObjectTypePerformanceConfigurationINTEL + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV, // kVulkanObjectTypeIndirectCommandsLayoutNV + VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT, // kVulkanObjectTypePrivateDataSlotEXT }; // Helper function to convert from VkDebugReportObjectTypeEXT to VkObjectType @@ -282,10 +287,8 @@ static inline VkObjectType convertDebugReportObjectToCoreObject(VkDebugReportObj return VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT; } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT) { return VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR; - } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT) { - return VK_OBJECT_TYPE_OBJECT_TABLE_NVX; - } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT) { - return VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX; + } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT) { + return VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR; } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT) { return VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR; } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT) { @@ -368,10 +371,8 @@ static inline VkDebugReportObjectTypeEXT convertCoreObjectToDebugReportObject(Vk return VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT; } else if (core_report_obj == VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR) { return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT; - } else if (core_report_obj == VK_OBJECT_TYPE_OBJECT_TABLE_NVX) { - return VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT; - } else if (core_report_obj == VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX) { - return VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT; + } else if (core_report_obj == VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR) { + return VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT; } else if (core_report_obj == VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR) { return VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT; } else if (core_report_obj == VK_OBJECT_TYPE_VALIDATION_CACHE_EXT) { diff --git a/thirdparty/vulkan/loader/wsi.c b/thirdparty/vulkan/loader/wsi.c index 
f8d8e5374c4..804046cf95a 100644 --- a/thirdparty/vulkan/loader/wsi.c +++ b/thirdparty/vulkan/loader/wsi.c @@ -52,6 +52,9 @@ void wsi_create_instance(struct loader_instance *ptr_instance, const VkInstanceC #ifdef VK_USE_PLATFORM_XLIB_KHR ptr_instance->wsi_xlib_surface_enabled = false; #endif // VK_USE_PLATFORM_XLIB_KHR +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + ptr_instance->wsi_directfb_surface_enabled = false; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT #ifdef VK_USE_PLATFORM_ANDROID_KHR ptr_instance->wsi_android_surface_enabled = false; #endif // VK_USE_PLATFORM_ANDROID_KHR @@ -96,6 +99,12 @@ void wsi_create_instance(struct loader_instance *ptr_instance, const VkInstanceC continue; } #endif // VK_USE_PLATFORM_XLIB_KHR +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME) == 0) { + ptr_instance->wsi_directfb_surface_enabled = true; + continue; + } +#endif // VK_USE_PLATFORM_DIRECTFB_EXT #ifdef VK_USE_PLATFORM_ANDROID_KHR if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) { ptr_instance->wsi_android_surface_enabled = true; @@ -154,6 +163,9 @@ bool wsi_unsupported_instance_extension(const VkExtensionProperties *ext_prop) { #ifndef VK_USE_PLATFORM_XLIB_KHR if (!strcmp(ext_prop->extensionName, "VK_KHR_xlib_surface")) return true; #endif // VK_USE_PLATFORM_XLIB_KHR +#ifndef VK_USE_PLATFORM_DIRECTFB_EXT + if (!strcmp(ext_prop->extensionName, "VK_EXT_directfb_surface")) return true; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT return false; } @@ -942,6 +954,124 @@ VKAPI_ATTR VkBool32 VKAPI_CALL terminator_GetPhysicalDeviceXlibPresentationSuppo } #endif // VK_USE_PLATFORM_XLIB_KHR +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + +// Functions for the VK_EXT_directfb_surface extension: + +// This is the trampoline entrypoint for CreateDirectFBSurfaceEXT +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT(VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_layer_dispatch(instance); + VkResult res; + + res = disp->CreateDirectFBSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface); + return res; +} + +// This is the instance chain terminator function for CreateDirectFBSurfaceEXT +VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDirectFBSurfaceEXT(VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + VkResult vkRes = VK_SUCCESS; + VkIcdSurface *pIcdSurface = NULL; + uint32_t i = 0; + + // First, check to ensure the appropriate extension was enabled: + struct loader_instance *ptr_instance = loader_get_instance(instance); + if (!ptr_instance->wsi_directfb_surface_enabled) { + loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "VK_EXT_directfb_surface extension not enabled. 
vkCreateDirectFBSurfaceEXT not executed!\n"); + vkRes = VK_ERROR_EXTENSION_NOT_PRESENT; + goto out; + } + + // Next, if so, proceed with the implementation of this function: + pIcdSurface = + AllocateIcdSurfaceStruct(ptr_instance, sizeof(pIcdSurface->directfb_surf.base), sizeof(pIcdSurface->directfb_surf)); + if (pIcdSurface == NULL) { + vkRes = VK_ERROR_OUT_OF_HOST_MEMORY; + goto out; + } + + pIcdSurface->directfb_surf.base.platform = VK_ICD_WSI_PLATFORM_DIRECTFB; + pIcdSurface->directfb_surf.dfb = pCreateInfo->dfb; + pIcdSurface->directfb_surf.surface = pCreateInfo->surface; + + // Loop through each ICD and determine if they need to create a surface + for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) { + if (icd_term->scanned_icd->interface_version >= ICD_VER_SUPPORTS_ICD_SURFACE_KHR) { + if (NULL != icd_term->dispatch.CreateDirectFBSurfaceEXT) { + vkRes = icd_term->dispatch.CreateDirectFBSurfaceEXT(icd_term->instance, pCreateInfo, pAllocator, + &pIcdSurface->real_icd_surfaces[i]); + if (VK_SUCCESS != vkRes) { + goto out; + } + } + } + } + + *pSurface = (VkSurfaceKHR)pIcdSurface; + +out: + + if (VK_SUCCESS != vkRes && NULL != pIcdSurface) { + if (NULL != pIcdSurface->real_icd_surfaces) { + i = 0; + for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) { + if ((VkSurfaceKHR)NULL != pIcdSurface->real_icd_surfaces[i] && NULL != icd_term->dispatch.DestroySurfaceKHR) { + icd_term->dispatch.DestroySurfaceKHR(icd_term->instance, pIcdSurface->real_icd_surfaces[i], pAllocator); + } + } + loader_instance_heap_free(ptr_instance, pIcdSurface->real_icd_surfaces); + } + loader_instance_heap_free(ptr_instance, pIcdSurface); + } + + return vkRes; +} + +// This is the trampoline entrypoint for +// GetPhysicalDeviceDirectFBPresentationSupportEXT +LOADER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB *dfb) { + VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice); + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_layer_dispatch(physicalDevice); + VkBool32 res = disp->GetPhysicalDeviceDirectFBPresentationSupportEXT(unwrapped_phys_dev, queueFamilyIndex, dfb); + return res; +} + +// This is the instance chain terminator function for +// GetPhysicalDeviceDirectFBPresentationSupportEXT +VKAPI_ATTR VkBool32 VKAPI_CALL terminator_GetPhysicalDeviceDirectFBPresentationSupportEXT(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB *dfb) { + // First, check to ensure the appropriate extension was enabled: + struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice; + struct loader_icd_term *icd_term = phys_dev_term->this_icd_term; + struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance; + if (!ptr_instance->wsi_directfb_surface_enabled) { + loader_log( + ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "VK_EXT_directfb_surface extension not enabled. 
vkGetPhysicalDeviceWaylandPresentationSupportKHR not executed!\n"); + return VK_SUCCESS; + } + + if (NULL == icd_term->dispatch.GetPhysicalDeviceDirectFBPresentationSupportEXT) { + loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "ICD for selected physical device is not exporting vkGetPhysicalDeviceDirectFBPresentationSupportEXT!\n"); + assert(false && "loader: null GetPhysicalDeviceDirectFBPresentationSupportEXT ICD pointer"); + } + + return icd_term->dispatch.GetPhysicalDeviceDirectFBPresentationSupportEXT(phys_dev_term->phys_dev, queueFamilyIndex, dfb); +} +#endif // VK_USE_PLATFORM_DIRECTFB_EXT + #ifdef VK_USE_PLATFORM_ANDROID_KHR // Functions for the VK_KHR_android_surface extension: @@ -1806,6 +1936,164 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDisplayPlaneCapabilities2KHR(VkPhys pDisplayPlaneInfo->planeIndex, &pCapabilities->capabilities); } +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, + VkSurfaceCapabilities2KHR *pSurfaceCapabilities) { + const VkLayerInstanceDispatchTable *disp; + VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice); + disp = loader_get_instance_layer_dispatch(physicalDevice); + return disp->GetPhysicalDeviceSurfaceCapabilities2KHR(unwrapped_phys_dev, pSurfaceInfo, pSurfaceCapabilities); +} + +VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceCapabilities2KHR( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, + VkSurfaceCapabilities2KHR *pSurfaceCapabilities) { + struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice; + struct loader_icd_term *icd_term = phys_dev_term->this_icd_term; + struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance; + + if (!ptr_instance->wsi_surface_enabled) { + loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "VK_KHR_surface extension not enabled. vkGetPhysicalDeviceSurfaceCapabilities2KHR not executed!\n"); + return VK_SUCCESS; + } + + VkIcdSurface *icd_surface = (VkIcdSurface *)(pSurfaceInfo->surface); + uint8_t icd_index = phys_dev_term->icd_index; + + if (icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilities2KHR != NULL) { + VkBaseOutStructure *pNext = (VkBaseOutStructure *)pSurfaceCapabilities->pNext; + while (pNext != NULL) { + if ((int)pNext->sType == VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR) { + // Not all ICDs may be supporting VK_KHR_surface_protected_capabilities + // Initialize VkSurfaceProtectedCapabilitiesKHR.supportsProtected to false and + // if an ICD supports protected surfaces, it will reset it to true accordingly. 
+ ((VkSurfaceProtectedCapabilitiesKHR *)pNext)->supportsProtected = VK_FALSE; + } + pNext = (VkBaseOutStructure *)pNext->pNext; + } + + // Pass the call to the driver, possibly unwrapping the ICD surface + if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) { + VkPhysicalDeviceSurfaceInfo2KHR info_copy = *pSurfaceInfo; + info_copy.surface = icd_surface->real_icd_surfaces[icd_index]; + return icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilities2KHR(phys_dev_term->phys_dev, &info_copy, + pSurfaceCapabilities); + } else { + return icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilities2KHR(phys_dev_term->phys_dev, pSurfaceInfo, + pSurfaceCapabilities); + } + } else { + // Emulate the call + loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, + "vkGetPhysicalDeviceSurfaceCapabilities2KHR: Emulating call in ICD \"%s\" using " + "vkGetPhysicalDeviceSurfaceCapabilitiesKHR", + icd_term->scanned_icd->lib_name); + + if (pSurfaceInfo->pNext != NULL) { + loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, + "vkGetPhysicalDeviceSurfaceCapabilities2KHR: Emulation found unrecognized structure type in " + "pSurfaceInfo->pNext - this struct will be ignored"); + } + + // Write to the VkSurfaceCapabilities2KHR struct + VkSurfaceKHR surface = pSurfaceInfo->surface; + if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) { + surface = icd_surface->real_icd_surfaces[icd_index]; + } + VkResult res = icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilitiesKHR(phys_dev_term->phys_dev, surface, + &pSurfaceCapabilities->surfaceCapabilities); + + if (pSurfaceCapabilities->pNext != NULL) { + loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, + "vkGetPhysicalDeviceSurfaceCapabilities2KHR: Emulation found unrecognized structure type in " + "pSurfaceCapabilities->pNext - this struct will be ignored"); + } + return res; + } +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, + uint32_t *pSurfaceFormatCount, VkSurfaceFormat2KHR *pSurfaceFormats) { + const VkLayerInstanceDispatchTable *disp; + VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice); + disp = loader_get_instance_layer_dispatch(physicalDevice); + return disp->GetPhysicalDeviceSurfaceFormats2KHR(unwrapped_phys_dev, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats); +} + +VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, + uint32_t *pSurfaceFormatCount, + VkSurfaceFormat2KHR *pSurfaceFormats) { + struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice; + struct loader_icd_term *icd_term = phys_dev_term->this_icd_term; + struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance; + + if (!ptr_instance->wsi_surface_enabled) { + loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "VK_KHR_surface extension not enabled. 
vkGetPhysicalDeviceSurfaceFormats2KHR not executed!\n"); + return VK_SUCCESS; + } + + VkIcdSurface *icd_surface = (VkIcdSurface *)(pSurfaceInfo->surface); + uint8_t icd_index = phys_dev_term->icd_index; + + if (icd_term->dispatch.GetPhysicalDeviceSurfaceFormats2KHR != NULL) { + // Pass the call to the driver, possibly unwrapping the ICD surface + if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) { + VkPhysicalDeviceSurfaceInfo2KHR info_copy = *pSurfaceInfo; + info_copy.surface = icd_surface->real_icd_surfaces[icd_index]; + return icd_term->dispatch.GetPhysicalDeviceSurfaceFormats2KHR(phys_dev_term->phys_dev, &info_copy, pSurfaceFormatCount, + pSurfaceFormats); + } else { + return icd_term->dispatch.GetPhysicalDeviceSurfaceFormats2KHR(phys_dev_term->phys_dev, pSurfaceInfo, + pSurfaceFormatCount, pSurfaceFormats); + } + } else { + // Emulate the call + loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, + "vkGetPhysicalDeviceSurfaceFormats2KHR: Emulating call in ICD \"%s\" using vkGetPhysicalDeviceSurfaceFormatsKHR", + icd_term->scanned_icd->lib_name); + + if (pSurfaceInfo->pNext != NULL) { + loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, + "vkGetPhysicalDeviceSurfaceFormats2KHR: Emulation found unrecognized structure type in pSurfaceInfo->pNext " + "- this struct will be ignored"); + } + + VkSurfaceKHR surface = pSurfaceInfo->surface; + if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) { + surface = icd_surface->real_icd_surfaces[icd_index]; + } + + if (*pSurfaceFormatCount == 0 || pSurfaceFormats == NULL) { + // Write to pSurfaceFormatCount + return icd_term->dispatch.GetPhysicalDeviceSurfaceFormatsKHR(phys_dev_term->phys_dev, surface, pSurfaceFormatCount, + NULL); + } else { + // Allocate a temporary array for the output of the old function + VkSurfaceFormatKHR *formats = loader_stack_alloc(*pSurfaceFormatCount * sizeof(VkSurfaceFormatKHR)); + if (formats == NULL) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + VkResult res = icd_term->dispatch.GetPhysicalDeviceSurfaceFormatsKHR(phys_dev_term->phys_dev, surface, + pSurfaceFormatCount, formats); + for (uint32_t i = 0; i < *pSurfaceFormatCount; ++i) { + pSurfaceFormats[i].surfaceFormat = formats[i]; + if (pSurfaceFormats[i].pNext != NULL) { + loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, + "vkGetPhysicalDeviceSurfaceFormats2KHR: Emulation found unrecognized structure type in " + "pSurfaceFormats[%d].pNext - this struct will be ignored", + i); + } + } + return res; + } + } +} + bool wsi_swapchain_instance_gpa(struct loader_instance *ptr_instance, const char *name, void **addr) { *addr = NULL; @@ -1846,6 +2134,17 @@ bool wsi_swapchain_instance_gpa(struct loader_instance *ptr_instance, const char return true; } + // Functions for VK_KHR_get_surface_capabilities2 extension: + if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilities2KHR", name)) { + *addr = ptr_instance->wsi_surface_enabled ? (void *)vkGetPhysicalDeviceSurfaceCapabilities2KHR : NULL; + return true; + } + + if (!strcmp("vkGetPhysicalDeviceSurfaceFormats2KHR", name)) { + *addr = ptr_instance->wsi_surface_enabled ? 
(void *)vkGetPhysicalDeviceSurfaceFormats2KHR : NULL; + return true; + } + // Functions for the VK_KHR_swapchain extension: // Note: This is a device extension, and its functions are statically @@ -1925,6 +2224,18 @@ bool wsi_swapchain_instance_gpa(struct loader_instance *ptr_instance, const char return true; } #endif // VK_USE_PLATFORM_XLIB_KHR +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + + // Functions for the VK_EXT_directfb_surface extension: + if (!strcmp("vkCreateDirectFBSurfaceEXT", name)) { + *addr = ptr_instance->wsi_directfb_surface_enabled ? (void *)vkCreateDirectFBSurfaceEXT : NULL; + return true; + } + if (!strcmp("vkGetPhysicalDeviceDirectFBPresentationSupportEXT", name)) { + *addr = ptr_instance->wsi_directfb_surface_enabled ? (void *)vkGetPhysicalDeviceDirectFBPresentationSupportEXT : NULL; + return true; + } +#endif // VK_USE_PLATFORM_DIRECTFB_EXT #ifdef VK_USE_PLATFORM_ANDROID_KHR // Functions for the VK_KHR_android_surface extension: diff --git a/thirdparty/vulkan/loader/wsi.h b/thirdparty/vulkan/loader/wsi.h index 3e44efa9e97..e6c699f58a2 100644 --- a/thirdparty/vulkan/loader/wsi.h +++ b/thirdparty/vulkan/loader/wsi.h @@ -39,6 +39,9 @@ typedef struct { #ifdef VK_USE_PLATFORM_XLIB_KHR VkIcdSurfaceXlib xlib_surf; #endif // VK_USE_PLATFORM_XLIB_KHR +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT + VkIcdSurfaceDirectFB directfb_surf; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT #ifdef VK_USE_PLATFORM_MACOS_MVK VkIcdSurfaceMacOS macos_surf; #endif // VK_USE_PLATFORM_MACOS_MVK @@ -121,6 +124,14 @@ VKAPI_ATTR VkBool32 VKAPI_CALL terminator_GetPhysicalDeviceXlibPresentationSuppo uint32_t queueFamilyIndex, Display *dpy, VisualID visualID); #endif +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDirectFBSurfaceEXT(VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface); +VKAPI_ATTR VkBool32 VKAPI_CALL terminator_GetPhysicalDeviceDirectFBPresentationSupportEXT(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB *dfb); +#endif #ifdef VK_USE_PLATFORM_MACOS_MVK VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface); @@ -181,4 +192,13 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDisplayPlaneCapabilities2KHR(VkPhys const VkDisplayPlaneInfo2KHR *pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR *pCapabilities); +VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceCapabilities2KHR( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, + VkSurfaceCapabilities2KHR *pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, + uint32_t *pSurfaceFormatCount, + VkSurfaceFormat2KHR *pSurfaceFormats); + #endif // WSI_H diff --git a/thirdparty/vulkan/patches/Vulkan-Loader-revert-pr260.patch b/thirdparty/vulkan/patches/Vulkan-Loader-revert-pr260.patch deleted file mode 100644 index 1267e560df3..00000000000 --- a/thirdparty/vulkan/patches/Vulkan-Loader-revert-pr260.patch +++ /dev/null @@ -1,57 +0,0 @@ -diff --git a/thirdparty/vulkan/loader/loader.c b/thirdparty/vulkan/loader/loader.c -index 87d08d5116..c7cdb47122 100644 ---- a/thirdparty/vulkan/loader/loader.c -+++ b/thirdparty/vulkan/loader/loader.c -@@ -7330,7 +7330,7 @@ out: - return result; - } - --#if 
defined(_WIN32) -+#if defined(_WIN32) && defined(LOADER_DYNAMIC_LIB) - BOOL WINAPI DllMain(HINSTANCE hinst, DWORD reason, LPVOID reserved) { - switch (reason) { - case DLL_PROCESS_ATTACH: -diff --git a/thirdparty/vulkan/loader/loader.h b/thirdparty/vulkan/loader/loader.h -index 5e9495521b..56745a968d 100644 ---- a/thirdparty/vulkan/loader/loader.h -+++ b/thirdparty/vulkan/loader/loader.h -@@ -420,6 +420,9 @@ static inline void loader_init_dispatch(void *obj, const void *data) { - // Global variables used across files - extern struct loader_struct loader; - extern THREAD_LOCAL_DECL struct loader_instance *tls_instance; -+#if defined(_WIN32) && !defined(LOADER_DYNAMIC_LIB) -+extern LOADER_PLATFORM_THREAD_ONCE_DEFINITION(once_init); -+#endif - extern loader_platform_thread_mutex loader_lock; - extern loader_platform_thread_mutex loader_json_lock; - -diff --git a/thirdparty/vulkan/loader/vk_loader_platform.h b/thirdparty/vulkan/loader/vk_loader_platform.h -index 7824e35d6b..62e8e3ae09 100644 ---- a/thirdparty/vulkan/loader/vk_loader_platform.h -+++ b/thirdparty/vulkan/loader/vk_loader_platform.h -@@ -330,9 +330,25 @@ typedef HANDLE loader_platform_thread; - // The once init functionality is not used when building a DLL on Windows. This is because there is no way to clean up the - // resources allocated by anything allocated by once init. This isn't a problem for static libraries, but it is for dynamic - // ones. When building a DLL, we use DllMain() instead to allow properly cleaning up resources. -+#if defined(LOADER_DYNAMIC_LIB) - #define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var) - #define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var) - #define LOADER_PLATFORM_THREAD_ONCE(ctl, func) -+#else -+#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var) INIT_ONCE var = INIT_ONCE_STATIC_INIT; -+#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var) INIT_ONCE var; -+#define LOADER_PLATFORM_THREAD_ONCE(ctl, func) loader_platform_thread_once_fn(ctl, func) -+static BOOL CALLBACK InitFuncWrapper(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *Context) { -+ void (*func)(void) = (void (*)(void))Parameter; -+ func(); -+ return TRUE; -+} -+static void loader_platform_thread_once_fn(void *ctl, void (*func)(void)) { -+ assert(func != NULL); -+ assert(ctl != NULL); -+ InitOnceExecuteOnce((PINIT_ONCE)ctl, InitFuncWrapper, (void *)func, NULL); -+} -+#endif - - // Thread IDs: - typedef DWORD loader_platform_thread_id; diff --git a/thirdparty/vulkan/vk_enum_string_helper.h b/thirdparty/vulkan/vk_enum_string_helper.h index 1c99b31270f..39884deeed3 100644 --- a/thirdparty/vulkan/vk_enum_string_helper.h +++ b/thirdparty/vulkan/vk_enum_string_helper.h @@ -39,17 +39,6 @@ #include -static inline const char* string_VkPipelineCacheHeaderVersion(VkPipelineCacheHeaderVersion input_value) -{ - switch ((VkPipelineCacheHeaderVersion)input_value) - { - case VK_PIPELINE_CACHE_HEADER_VERSION_ONE: - return "VK_PIPELINE_CACHE_HEADER_VERSION_ONE"; - default: - return "Unhandled VkPipelineCacheHeaderVersion"; - } -} - static inline const char* string_VkResult(VkResult input_value) { switch ((VkResult)input_value) @@ -72,6 +61,8 @@ static inline const char* string_VkResult(VkResult input_value) return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR"; case VK_ERROR_INCOMPATIBLE_DRIVER: return "VK_ERROR_INCOMPATIBLE_DRIVER"; + case VK_ERROR_INCOMPATIBLE_VERSION_KHR: + return "VK_ERROR_INCOMPATIBLE_VERSION_KHR"; case VK_ERROR_INITIALIZATION_FAILED: return "VK_ERROR_INITIALIZATION_FAILED"; case VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT: @@ -114,10 
+105,20 @@ static inline const char* string_VkResult(VkResult input_value) return "VK_INCOMPLETE"; case VK_NOT_READY: return "VK_NOT_READY"; + case VK_OPERATION_DEFERRED_KHR: + return "VK_OPERATION_DEFERRED_KHR"; + case VK_OPERATION_NOT_DEFERRED_KHR: + return "VK_OPERATION_NOT_DEFERRED_KHR"; + case VK_PIPELINE_COMPILE_REQUIRED_EXT: + return "VK_PIPELINE_COMPILE_REQUIRED_EXT"; case VK_SUBOPTIMAL_KHR: return "VK_SUBOPTIMAL_KHR"; case VK_SUCCESS: return "VK_SUCCESS"; + case VK_THREAD_DONE_KHR: + return "VK_THREAD_DONE_KHR"; + case VK_THREAD_IDLE_KHR: + return "VK_THREAD_IDLE_KHR"; case VK_TIMEOUT: return "VK_TIMEOUT"; default: @@ -129,12 +130,32 @@ static inline const char* string_VkStructureType(VkStructureType input_value) { switch ((VkStructureType)input_value) { + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR"; case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV: return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR"; case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV: return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR"; case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV: return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR"; case VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR: return "VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR"; case VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR: @@ -157,8 +178,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2"; case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT: return "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT"; - case VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV: - return "VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV"; + case VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR: + return "VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR"; case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO: return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO"; case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO: @@ -173,12 
+194,18 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO"; case VK_STRUCTURE_TYPE_BIND_SPARSE_INFO: return "VK_STRUCTURE_TYPE_BIND_SPARSE_INFO"; + case VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR: + return "VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR"; + case VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR: + return "VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR"; case VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO: return "VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO"; case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT: return "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT"; case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO: return "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO"; + case VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR: + return "VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR"; case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER: return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER"; case VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2: @@ -191,10 +218,6 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT"; case VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV: return "VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV"; - case VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX: - return "VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX"; - case VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX: - return "VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX"; case VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO: return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO"; case VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO: @@ -203,6 +226,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT"; case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO: return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM"; case VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO: return "VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO"; case VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO: @@ -211,8 +236,22 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT"; case VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV: return "VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR: + return "VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR"; + case VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR: + return "VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR"; + case VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR: + return "VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR"; case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET: return "VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET"; + case VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR: + return "VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR"; + case VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR: + return "VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR"; + case VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR"; case 
VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR: return "VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR"; case VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT: @@ -239,6 +278,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV"; case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV: return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV"; + case VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR: + return "VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR"; case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO: return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO"; case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT: @@ -259,12 +300,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO"; case VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO: return "VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV"; case VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT: return "VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT"; - case VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX: - return "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX"; - case VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX: - return "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX"; case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO: return "VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO"; case VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO: @@ -285,12 +324,16 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO"; case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD: return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD"; + case VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT"; case VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO: return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO"; case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT: return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT"; case VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2: return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2"; + case VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT"; case VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT: return "VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT"; case VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR: @@ -369,6 +412,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO"; case VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV: return "VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV"; + case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV: + return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV"; + case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV: + return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV"; case VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV: return "VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV"; case VK_STRUCTURE_TYPE_GEOMETRY_NV: @@ -377,12 +424,20 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return 
"VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV"; case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO: return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV"; case VK_STRUCTURE_TYPE_HDR_METADATA_EXT: return "VK_STRUCTURE_TYPE_HDR_METADATA_EXT"; case VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT: return "VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT"; case VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA: return "VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR: + return "VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR"; + case VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR: + return "VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR"; case VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO: return "VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO"; case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT: @@ -401,12 +456,16 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2"; case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO: return "VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO"; + case VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR: + return "VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR"; case VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2: return "VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2"; case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO: return "VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO"; case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR: return "VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX"; case VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT: return "VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT"; case VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO: @@ -433,8 +492,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR"; case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR: return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR"; - case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX: - return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX"; + case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV: + return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV"; case VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL: return "VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL"; case VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO: @@ -481,8 +542,6 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT"; case VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT: return "VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT"; - case VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX: - return "VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX"; case VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL: return "VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL"; case 
VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR: @@ -499,6 +558,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT: @@ -527,6 +588,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: @@ -537,12 +602,20 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO: @@ -557,6 +630,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT: @@ -577,6 +654,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: @@ -613,10 +692,18 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: @@ -625,10 +712,18 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: @@ -639,6 +734,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR: @@ -749,6 +846,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO"; case VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO: return "VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR"; case VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO: return "VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO"; case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT: @@ -799,20 +898,28 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR"; case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE: return "VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE"; + case VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT"; case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO: return "VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO"; case VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO: return "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO"; - case VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL: - return "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL"; case VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR: return "VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL: + return "VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL"; case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV: return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV"; case VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2: return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR"; case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV: return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR"; case VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV: return 
"VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV"; case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO: @@ -831,8 +938,14 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO"; case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT: return "VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT"; + case VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM: + return "VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM"; + case VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR: + return "VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR"; case VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO: return "VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO"; + case VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT"; case VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO: return "VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO"; case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO: @@ -921,8 +1034,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value) return "VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR"; case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET: return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET"; - case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV: - return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV"; + case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR: + return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR"; case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT: return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT"; case VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR: @@ -934,6 +1047,300 @@ static inline const char* string_VkStructureType(VkStructureType input_value) } } +static inline const char* string_VkAccessFlagBits(VkAccessFlagBits input_value) +{ + switch ((VkAccessFlagBits)input_value) + { + case VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR: + return "VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR"; + case VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR: + return "VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR"; + case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT: + return "VK_ACCESS_COLOR_ATTACHMENT_READ_BIT"; + case VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT: + return "VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT"; + case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT: + return "VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT"; + case VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV: + return "VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV"; + case VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV: + return "VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV"; + case VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT: + return "VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT"; + case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT: + return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT"; + case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT: + return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT"; + case VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT: + return "VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT"; + case VK_ACCESS_HOST_READ_BIT: + return "VK_ACCESS_HOST_READ_BIT"; + case VK_ACCESS_HOST_WRITE_BIT: + return "VK_ACCESS_HOST_WRITE_BIT"; + case VK_ACCESS_INDEX_READ_BIT: + return "VK_ACCESS_INDEX_READ_BIT"; + case VK_ACCESS_INDIRECT_COMMAND_READ_BIT: + return "VK_ACCESS_INDIRECT_COMMAND_READ_BIT"; + case 
VK_ACCESS_INPUT_ATTACHMENT_READ_BIT: + return "VK_ACCESS_INPUT_ATTACHMENT_READ_BIT"; + case VK_ACCESS_MEMORY_READ_BIT: + return "VK_ACCESS_MEMORY_READ_BIT"; + case VK_ACCESS_MEMORY_WRITE_BIT: + return "VK_ACCESS_MEMORY_WRITE_BIT"; + case VK_ACCESS_SHADER_READ_BIT: + return "VK_ACCESS_SHADER_READ_BIT"; + case VK_ACCESS_SHADER_WRITE_BIT: + return "VK_ACCESS_SHADER_WRITE_BIT"; + case VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV: + return "VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV"; + case VK_ACCESS_TRANSFER_READ_BIT: + return "VK_ACCESS_TRANSFER_READ_BIT"; + case VK_ACCESS_TRANSFER_WRITE_BIT: + return "VK_ACCESS_TRANSFER_WRITE_BIT"; + case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT: + return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT"; + case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT: + return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT"; + case VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT: + return "VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT"; + case VK_ACCESS_UNIFORM_READ_BIT: + return "VK_ACCESS_UNIFORM_READ_BIT"; + case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT: + return "VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT"; + default: + return "Unhandled VkAccessFlagBits"; + } +} + +static inline std::string string_VkAccessFlags(VkAccessFlags input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkAccessFlagBits(static_cast<VkAccessFlagBits>(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkAccessFlagBits(static_cast<VkAccessFlagBits>(0))); + return ret; +} + +static inline const char* string_VkImageLayout(VkImageLayout input_value) +{ + switch ((VkImageLayout)input_value) + { + case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT: + return "VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT"; + case VK_IMAGE_LAYOUT_GENERAL: + return "VK_IMAGE_LAYOUT_GENERAL"; + case VK_IMAGE_LAYOUT_PREINITIALIZED: + return "VK_IMAGE_LAYOUT_PREINITIALIZED"; + case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: + return "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR"; + case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV: + return "VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV"; + case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR: + return "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR"; + case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: + return "VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL"; + case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: + return
"VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL"; + case VK_IMAGE_LAYOUT_UNDEFINED: + return "VK_IMAGE_LAYOUT_UNDEFINED"; + default: + return "Unhandled VkImageLayout"; + } +} + +static inline const char* string_VkImageAspectFlagBits(VkImageAspectFlagBits input_value) +{ + switch ((VkImageAspectFlagBits)input_value) + { + case VK_IMAGE_ASPECT_COLOR_BIT: + return "VK_IMAGE_ASPECT_COLOR_BIT"; + case VK_IMAGE_ASPECT_DEPTH_BIT: + return "VK_IMAGE_ASPECT_DEPTH_BIT"; + case VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT: + return "VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT"; + case VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT: + return "VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT"; + case VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT: + return "VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT"; + case VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT: + return "VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT"; + case VK_IMAGE_ASPECT_METADATA_BIT: + return "VK_IMAGE_ASPECT_METADATA_BIT"; + case VK_IMAGE_ASPECT_PLANE_0_BIT: + return "VK_IMAGE_ASPECT_PLANE_0_BIT"; + case VK_IMAGE_ASPECT_PLANE_1_BIT: + return "VK_IMAGE_ASPECT_PLANE_1_BIT"; + case VK_IMAGE_ASPECT_PLANE_2_BIT: + return "VK_IMAGE_ASPECT_PLANE_2_BIT"; + case VK_IMAGE_ASPECT_STENCIL_BIT: + return "VK_IMAGE_ASPECT_STENCIL_BIT"; + default: + return "Unhandled VkImageAspectFlagBits"; + } +} + +static inline std::string string_VkImageAspectFlags(VkImageAspectFlags input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageAspectFlagBits(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkImageAspectFlagBits(static_cast(0))); + return ret; +} + +static inline const char* string_VkObjectType(VkObjectType input_value) +{ + switch ((VkObjectType)input_value) + { + case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR: + return "VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR"; + case VK_OBJECT_TYPE_BUFFER: + return "VK_OBJECT_TYPE_BUFFER"; + case VK_OBJECT_TYPE_BUFFER_VIEW: + return "VK_OBJECT_TYPE_BUFFER_VIEW"; + case VK_OBJECT_TYPE_COMMAND_BUFFER: + return "VK_OBJECT_TYPE_COMMAND_BUFFER"; + case VK_OBJECT_TYPE_COMMAND_POOL: + return "VK_OBJECT_TYPE_COMMAND_POOL"; + case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT: + return "VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT"; + case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT: + return "VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT"; + case VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR: + return "VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR"; + case VK_OBJECT_TYPE_DESCRIPTOR_POOL: + return "VK_OBJECT_TYPE_DESCRIPTOR_POOL"; + case VK_OBJECT_TYPE_DESCRIPTOR_SET: + return "VK_OBJECT_TYPE_DESCRIPTOR_SET"; + case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT: + return "VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT"; + case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE: + return "VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE"; + case VK_OBJECT_TYPE_DEVICE: + return "VK_OBJECT_TYPE_DEVICE"; + case VK_OBJECT_TYPE_DEVICE_MEMORY: + return "VK_OBJECT_TYPE_DEVICE_MEMORY"; + case VK_OBJECT_TYPE_DISPLAY_KHR: + return "VK_OBJECT_TYPE_DISPLAY_KHR"; + case VK_OBJECT_TYPE_DISPLAY_MODE_KHR: + return "VK_OBJECT_TYPE_DISPLAY_MODE_KHR"; + case VK_OBJECT_TYPE_EVENT: + return "VK_OBJECT_TYPE_EVENT"; + case VK_OBJECT_TYPE_FENCE: + return "VK_OBJECT_TYPE_FENCE"; + case VK_OBJECT_TYPE_FRAMEBUFFER: + return "VK_OBJECT_TYPE_FRAMEBUFFER"; + case VK_OBJECT_TYPE_IMAGE: + return "VK_OBJECT_TYPE_IMAGE"; + case VK_OBJECT_TYPE_IMAGE_VIEW: + return "VK_OBJECT_TYPE_IMAGE_VIEW"; + case 
VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV: + return "VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV"; + case VK_OBJECT_TYPE_INSTANCE: + return "VK_OBJECT_TYPE_INSTANCE"; + case VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL: + return "VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL"; + case VK_OBJECT_TYPE_PHYSICAL_DEVICE: + return "VK_OBJECT_TYPE_PHYSICAL_DEVICE"; + case VK_OBJECT_TYPE_PIPELINE: + return "VK_OBJECT_TYPE_PIPELINE"; + case VK_OBJECT_TYPE_PIPELINE_CACHE: + return "VK_OBJECT_TYPE_PIPELINE_CACHE"; + case VK_OBJECT_TYPE_PIPELINE_LAYOUT: + return "VK_OBJECT_TYPE_PIPELINE_LAYOUT"; + case VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT: + return "VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT"; + case VK_OBJECT_TYPE_QUERY_POOL: + return "VK_OBJECT_TYPE_QUERY_POOL"; + case VK_OBJECT_TYPE_QUEUE: + return "VK_OBJECT_TYPE_QUEUE"; + case VK_OBJECT_TYPE_RENDER_PASS: + return "VK_OBJECT_TYPE_RENDER_PASS"; + case VK_OBJECT_TYPE_SAMPLER: + return "VK_OBJECT_TYPE_SAMPLER"; + case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION: + return "VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION"; + case VK_OBJECT_TYPE_SEMAPHORE: + return "VK_OBJECT_TYPE_SEMAPHORE"; + case VK_OBJECT_TYPE_SHADER_MODULE: + return "VK_OBJECT_TYPE_SHADER_MODULE"; + case VK_OBJECT_TYPE_SURFACE_KHR: + return "VK_OBJECT_TYPE_SURFACE_KHR"; + case VK_OBJECT_TYPE_SWAPCHAIN_KHR: + return "VK_OBJECT_TYPE_SWAPCHAIN_KHR"; + case VK_OBJECT_TYPE_UNKNOWN: + return "VK_OBJECT_TYPE_UNKNOWN"; + case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT: + return "VK_OBJECT_TYPE_VALIDATION_CACHE_EXT"; + default: + return "Unhandled VkObjectType"; + } +} + +static inline const char* string_VkVendorId(VkVendorId input_value) +{ + switch ((VkVendorId)input_value) + { + case VK_VENDOR_ID_CODEPLAY: + return "VK_VENDOR_ID_CODEPLAY"; + case VK_VENDOR_ID_KAZAN: + return "VK_VENDOR_ID_KAZAN"; + case VK_VENDOR_ID_MESA: + return "VK_VENDOR_ID_MESA"; + case VK_VENDOR_ID_VIV: + return "VK_VENDOR_ID_VIV"; + case VK_VENDOR_ID_VSI: + return "VK_VENDOR_ID_VSI"; + default: + return "Unhandled VkVendorId"; + } +} + +static inline const char* string_VkPipelineCacheHeaderVersion(VkPipelineCacheHeaderVersion input_value) +{ + switch ((VkPipelineCacheHeaderVersion)input_value) + { + case VK_PIPELINE_CACHE_HEADER_VERSION_ONE: + return "VK_PIPELINE_CACHE_HEADER_VERSION_ONE"; + default: + return "Unhandled VkPipelineCacheHeaderVersion"; + } +} + static inline const char* string_VkSystemAllocationScope(VkSystemAllocationScope input_value) { switch ((VkSystemAllocationScope)input_value) @@ -994,6 +1401,10 @@ static inline const char* string_VkFormat(VkFormat input_value) return "VK_FORMAT_A2R10G10B10_UNORM_PACK32"; case VK_FORMAT_A2R10G10B10_USCALED_PACK32: return "VK_FORMAT_A2R10G10B10_USCALED_PACK32"; + case VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT: + return "VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT"; + case VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT: + return "VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT"; case VK_FORMAT_A8B8G8R8_SINT_PACK32: return "VK_FORMAT_A8B8G8R8_SINT_PACK32"; case VK_FORMAT_A8B8G8R8_SNORM_PACK32: @@ -1459,6 +1870,8 @@ static inline const char* string_VkFormatFeatureFlagBits(VkFormatFeatureFlagBits { switch ((VkFormatFeatureFlagBits)input_value) { + case VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR: + return "VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR"; case VK_FORMAT_FEATURE_BLIT_DST_BIT: return "VK_FORMAT_FEATURE_BLIT_DST_BIT"; case VK_FORMAT_FEATURE_BLIT_SRC_BIT: @@ -1530,81 +1943,6 @@ static inline std::string string_VkFormatFeatureFlags(VkFormatFeatureFlags input return ret; } 
-static inline const char* string_VkImageType(VkImageType input_value) -{ - switch ((VkImageType)input_value) - { - case VK_IMAGE_TYPE_1D: - return "VK_IMAGE_TYPE_1D"; - case VK_IMAGE_TYPE_2D: - return "VK_IMAGE_TYPE_2D"; - case VK_IMAGE_TYPE_3D: - return "VK_IMAGE_TYPE_3D"; - default: - return "Unhandled VkImageType"; - } -} - -static inline const char* string_VkImageTiling(VkImageTiling input_value) -{ - switch ((VkImageTiling)input_value) - { - case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT: - return "VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT"; - case VK_IMAGE_TILING_LINEAR: - return "VK_IMAGE_TILING_LINEAR"; - case VK_IMAGE_TILING_OPTIMAL: - return "VK_IMAGE_TILING_OPTIMAL"; - default: - return "Unhandled VkImageTiling"; - } -} - -static inline const char* string_VkImageUsageFlagBits(VkImageUsageFlagBits input_value) -{ - switch ((VkImageUsageFlagBits)input_value) - { - case VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT: - return "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT"; - case VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT: - return "VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT"; - case VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT: - return "VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT"; - case VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT: - return "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT"; - case VK_IMAGE_USAGE_SAMPLED_BIT: - return "VK_IMAGE_USAGE_SAMPLED_BIT"; - case VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV: - return "VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV"; - case VK_IMAGE_USAGE_STORAGE_BIT: - return "VK_IMAGE_USAGE_STORAGE_BIT"; - case VK_IMAGE_USAGE_TRANSFER_DST_BIT: - return "VK_IMAGE_USAGE_TRANSFER_DST_BIT"; - case VK_IMAGE_USAGE_TRANSFER_SRC_BIT: - return "VK_IMAGE_USAGE_TRANSFER_SRC_BIT"; - case VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT: - return "VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT"; - default: - return "Unhandled VkImageUsageFlagBits"; - } -} - -static inline std::string string_VkImageUsageFlags(VkImageUsageFlags input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkImageUsageFlagBits(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkImageUsageFlagBits(static_cast(0))); - return ret; -} - static inline const char* string_VkImageCreateFlagBits(VkImageCreateFlagBits input_value) { switch ((VkImageCreateFlagBits)input_value) @@ -1699,6 +2037,151 @@ static inline std::string string_VkSampleCountFlags(VkSampleCountFlags input_val return ret; } +static inline const char* string_VkImageTiling(VkImageTiling input_value) +{ + switch ((VkImageTiling)input_value) + { + case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT: + return "VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT"; + case VK_IMAGE_TILING_LINEAR: + return "VK_IMAGE_TILING_LINEAR"; + case VK_IMAGE_TILING_OPTIMAL: + return "VK_IMAGE_TILING_OPTIMAL"; + default: + return "Unhandled VkImageTiling"; + } +} + +static inline const char* string_VkImageType(VkImageType input_value) +{ + switch ((VkImageType)input_value) + { + case VK_IMAGE_TYPE_1D: + return "VK_IMAGE_TYPE_1D"; + case VK_IMAGE_TYPE_2D: + return "VK_IMAGE_TYPE_2D"; + case VK_IMAGE_TYPE_3D: + return "VK_IMAGE_TYPE_3D"; + default: + return "Unhandled VkImageType"; + } +} + +static inline const char* string_VkImageUsageFlagBits(VkImageUsageFlagBits input_value) +{ + switch ((VkImageUsageFlagBits)input_value) + { + case VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT: + return "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT"; + case VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT: + 
return "VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT"; + case VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT: + return "VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT"; + case VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT: + return "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT"; + case VK_IMAGE_USAGE_SAMPLED_BIT: + return "VK_IMAGE_USAGE_SAMPLED_BIT"; + case VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV: + return "VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV"; + case VK_IMAGE_USAGE_STORAGE_BIT: + return "VK_IMAGE_USAGE_STORAGE_BIT"; + case VK_IMAGE_USAGE_TRANSFER_DST_BIT: + return "VK_IMAGE_USAGE_TRANSFER_DST_BIT"; + case VK_IMAGE_USAGE_TRANSFER_SRC_BIT: + return "VK_IMAGE_USAGE_TRANSFER_SRC_BIT"; + case VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT: + return "VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT"; + default: + return "Unhandled VkImageUsageFlagBits"; + } +} + +static inline std::string string_VkImageUsageFlags(VkImageUsageFlags input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageUsageFlagBits(static_cast<VkImageUsageFlagBits>(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkImageUsageFlagBits(static_cast<VkImageUsageFlagBits>(0))); + return ret; +} + +static inline const char* string_VkMemoryHeapFlagBits(VkMemoryHeapFlagBits input_value) +{ + switch ((VkMemoryHeapFlagBits)input_value) + { + case VK_MEMORY_HEAP_DEVICE_LOCAL_BIT: + return "VK_MEMORY_HEAP_DEVICE_LOCAL_BIT"; + case VK_MEMORY_HEAP_MULTI_INSTANCE_BIT: + return "VK_MEMORY_HEAP_MULTI_INSTANCE_BIT"; + default: + return "Unhandled VkMemoryHeapFlagBits"; + } +} + +static inline std::string string_VkMemoryHeapFlags(VkMemoryHeapFlags input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryHeapFlagBits(static_cast<VkMemoryHeapFlagBits>(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkMemoryHeapFlagBits(static_cast<VkMemoryHeapFlagBits>(0))); + return ret; +} + +static inline const char* string_VkMemoryPropertyFlagBits(VkMemoryPropertyFlagBits input_value) +{ + switch ((VkMemoryPropertyFlagBits)input_value) + { + case VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD: + return "VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD"; + case VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT: + return "VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT"; + case VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD: + return "VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD"; + case VK_MEMORY_PROPERTY_HOST_CACHED_BIT: + return "VK_MEMORY_PROPERTY_HOST_CACHED_BIT"; + case VK_MEMORY_PROPERTY_HOST_COHERENT_BIT: + return "VK_MEMORY_PROPERTY_HOST_COHERENT_BIT"; + case VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT: + return "VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT"; + case VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT: + return "VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT"; + case VK_MEMORY_PROPERTY_PROTECTED_BIT: + return "VK_MEMORY_PROPERTY_PROTECTED_BIT"; + default: + return "Unhandled VkMemoryPropertyFlagBits"; + } +} + +static inline std::string string_VkMemoryPropertyFlags(VkMemoryPropertyFlags input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryPropertyFlagBits(static_cast<VkMemoryPropertyFlagBits>(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkMemoryPropertyFlagBits(static_cast<VkMemoryPropertyFlagBits>(0))); + return ret; +} + static inline const char* string_VkPhysicalDeviceType(VkPhysicalDeviceType input_value) { switch
((VkPhysicalDeviceType)input_value) @@ -1753,76 +2236,6 @@ static inline std::string string_VkQueueFlags(VkQueueFlags input_value) return ret; } -static inline const char* string_VkMemoryPropertyFlagBits(VkMemoryPropertyFlagBits input_value) -{ - switch ((VkMemoryPropertyFlagBits)input_value) - { - case VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD: - return "VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD"; - case VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT: - return "VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT"; - case VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD: - return "VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD"; - case VK_MEMORY_PROPERTY_HOST_CACHED_BIT: - return "VK_MEMORY_PROPERTY_HOST_CACHED_BIT"; - case VK_MEMORY_PROPERTY_HOST_COHERENT_BIT: - return "VK_MEMORY_PROPERTY_HOST_COHERENT_BIT"; - case VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT: - return "VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT"; - case VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT: - return "VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT"; - case VK_MEMORY_PROPERTY_PROTECTED_BIT: - return "VK_MEMORY_PROPERTY_PROTECTED_BIT"; - default: - return "Unhandled VkMemoryPropertyFlagBits"; - } -} - -static inline std::string string_VkMemoryPropertyFlags(VkMemoryPropertyFlags input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkMemoryPropertyFlagBits(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkMemoryPropertyFlagBits(static_cast(0))); - return ret; -} - -static inline const char* string_VkMemoryHeapFlagBits(VkMemoryHeapFlagBits input_value) -{ - switch ((VkMemoryHeapFlagBits)input_value) - { - case VK_MEMORY_HEAP_DEVICE_LOCAL_BIT: - return "VK_MEMORY_HEAP_DEVICE_LOCAL_BIT"; - case VK_MEMORY_HEAP_MULTI_INSTANCE_BIT: - return "VK_MEMORY_HEAP_MULTI_INSTANCE_BIT"; - default: - return "Unhandled VkMemoryHeapFlagBits"; - } -} - -static inline std::string string_VkMemoryHeapFlags(VkMemoryHeapFlags input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkMemoryHeapFlagBits(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkMemoryHeapFlagBits(static_cast(0))); - return ret; -} - static inline const char* string_VkDeviceQueueCreateFlagBits(VkDeviceQueueCreateFlagBits input_value) { switch ((VkDeviceQueueCreateFlagBits)input_value) @@ -1854,8 +2267,8 @@ static inline const char* string_VkPipelineStageFlagBits(VkPipelineStageFlagBits { switch ((VkPipelineStageFlagBits)input_value) { - case VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV: - return "VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV"; + case VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR: + return "VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR"; case VK_PIPELINE_STAGE_ALL_COMMANDS_BIT: return "VK_PIPELINE_STAGE_ALL_COMMANDS_BIT"; case VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT: @@ -1864,8 +2277,8 @@ static inline const char* string_VkPipelineStageFlagBits(VkPipelineStageFlagBits return "VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT"; case VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT: return "VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT"; - case VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX: - return "VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX"; + case VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV: + return "VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV"; case VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT: return 
"VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT"; case VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT: @@ -1886,8 +2299,8 @@ static inline const char* string_VkPipelineStageFlagBits(VkPipelineStageFlagBits return "VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT"; case VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV: return "VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV"; - case VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV: - return "VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV"; + case VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR: + return "VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR"; case VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV: return "VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV"; case VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV: @@ -1927,50 +2340,30 @@ static inline std::string string_VkPipelineStageFlags(VkPipelineStageFlags input return ret; } -static inline const char* string_VkImageAspectFlagBits(VkImageAspectFlagBits input_value) +static inline const char* string_VkSparseMemoryBindFlagBits(VkSparseMemoryBindFlagBits input_value) { - switch ((VkImageAspectFlagBits)input_value) + switch ((VkSparseMemoryBindFlagBits)input_value) { - case VK_IMAGE_ASPECT_COLOR_BIT: - return "VK_IMAGE_ASPECT_COLOR_BIT"; - case VK_IMAGE_ASPECT_DEPTH_BIT: - return "VK_IMAGE_ASPECT_DEPTH_BIT"; - case VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT: - return "VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT"; - case VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT: - return "VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT"; - case VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT: - return "VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT"; - case VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT: - return "VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT"; - case VK_IMAGE_ASPECT_METADATA_BIT: - return "VK_IMAGE_ASPECT_METADATA_BIT"; - case VK_IMAGE_ASPECT_PLANE_0_BIT: - return "VK_IMAGE_ASPECT_PLANE_0_BIT"; - case VK_IMAGE_ASPECT_PLANE_1_BIT: - return "VK_IMAGE_ASPECT_PLANE_1_BIT"; - case VK_IMAGE_ASPECT_PLANE_2_BIT: - return "VK_IMAGE_ASPECT_PLANE_2_BIT"; - case VK_IMAGE_ASPECT_STENCIL_BIT: - return "VK_IMAGE_ASPECT_STENCIL_BIT"; + case VK_SPARSE_MEMORY_BIND_METADATA_BIT: + return "VK_SPARSE_MEMORY_BIND_METADATA_BIT"; default: - return "Unhandled VkImageAspectFlagBits"; + return "Unhandled VkSparseMemoryBindFlagBits"; } } -static inline std::string string_VkImageAspectFlags(VkImageAspectFlags input_value) +static inline std::string string_VkSparseMemoryBindFlags(VkSparseMemoryBindFlags input_value) { std::string ret; int index = 0; while(input_value) { if (input_value & 1) { if( !ret.empty()) ret.append("|"); - ret.append(string_VkImageAspectFlagBits(static_cast(1 << index))); + ret.append(string_VkSparseMemoryBindFlagBits(static_cast(1 << index))); } ++index; input_value >>= 1; } - if( ret.empty()) ret.append(string_VkImageAspectFlagBits(static_cast(0))); + if( ret.empty()) ret.append(string_VkSparseMemoryBindFlagBits(static_cast(0))); return ret; } @@ -2005,33 +2398,6 @@ static inline std::string string_VkSparseImageFormatFlags(VkSparseImageFormatFla return ret; } -static inline const char* string_VkSparseMemoryBindFlagBits(VkSparseMemoryBindFlagBits input_value) -{ - switch ((VkSparseMemoryBindFlagBits)input_value) - { - case VK_SPARSE_MEMORY_BIND_METADATA_BIT: - return "VK_SPARSE_MEMORY_BIND_METADATA_BIT"; - default: - return "Unhandled VkSparseMemoryBindFlagBits"; - } -} - -static inline std::string string_VkSparseMemoryBindFlags(VkSparseMemoryBindFlags input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - 
ret.append(string_VkSparseMemoryBindFlagBits(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkSparseMemoryBindFlagBits(static_cast(0))); - return ret; -} - static inline const char* string_VkFenceCreateFlagBits(VkFenceCreateFlagBits input_value) { switch ((VkFenceCreateFlagBits)input_value) @@ -2059,29 +2425,6 @@ static inline std::string string_VkFenceCreateFlags(VkFenceCreateFlags input_val return ret; } -static inline const char* string_VkQueryType(VkQueryType input_value) -{ - switch ((VkQueryType)input_value) - { - case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV: - return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV"; - case VK_QUERY_TYPE_OCCLUSION: - return "VK_QUERY_TYPE_OCCLUSION"; - case VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL: - return "VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL"; - case VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR: - return "VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR"; - case VK_QUERY_TYPE_PIPELINE_STATISTICS: - return "VK_QUERY_TYPE_PIPELINE_STATISTICS"; - case VK_QUERY_TYPE_TIMESTAMP: - return "VK_QUERY_TYPE_TIMESTAMP"; - case VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT: - return "VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT"; - default: - return "Unhandled VkQueryType"; - } -} - static inline const char* string_VkQueryPipelineStatisticFlagBits(VkQueryPipelineStatisticFlagBits input_value) { switch ((VkQueryPipelineStatisticFlagBits)input_value) @@ -2129,6 +2472,31 @@ static inline std::string string_VkQueryPipelineStatisticFlags(VkQueryPipelineSt return ret; } +static inline const char* string_VkQueryType(VkQueryType input_value) +{ + switch ((VkQueryType)input_value) + { + case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR: + return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR"; + case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR: + return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR"; + case VK_QUERY_TYPE_OCCLUSION: + return "VK_QUERY_TYPE_OCCLUSION"; + case VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL: + return "VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL"; + case VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR: + return "VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR"; + case VK_QUERY_TYPE_PIPELINE_STATISTICS: + return "VK_QUERY_TYPE_PIPELINE_STATISTICS"; + case VK_QUERY_TYPE_TIMESTAMP: + return "VK_QUERY_TYPE_TIMESTAMP"; + case VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT: + return "VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT"; + default: + return "Unhandled VkQueryType"; + } +} + static inline const char* string_VkQueryResultFlagBits(VkQueryResultFlagBits input_value) { switch ((VkQueryResultFlagBits)input_value) @@ -2207,8 +2575,8 @@ static inline const char* string_VkBufferUsageFlagBits(VkBufferUsageFlagBits inp return "VK_BUFFER_USAGE_INDEX_BUFFER_BIT"; case VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT: return "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT"; - case VK_BUFFER_USAGE_RAY_TRACING_BIT_NV: - return "VK_BUFFER_USAGE_RAY_TRACING_BIT_NV"; + case VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR: + return "VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR"; case VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT: return "VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT"; case VK_BUFFER_USAGE_STORAGE_BUFFER_BIT: @@ -2263,50 +2631,26 @@ static inline const char* string_VkSharingMode(VkSharingMode input_value) } } -static inline const char* string_VkImageLayout(VkImageLayout input_value) +static inline const char* string_VkComponentSwizzle(VkComponentSwizzle input_value) { - switch ((VkImageLayout)input_value) + switch 
((VkComponentSwizzle)input_value) { - case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: - return "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL"; - case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL: - return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL"; - case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: - return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL"; - case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL: - return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL"; - case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: - return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL"; - case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: - return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL"; - case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: - return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL"; - case VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT: - return "VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT"; - case VK_IMAGE_LAYOUT_GENERAL: - return "VK_IMAGE_LAYOUT_GENERAL"; - case VK_IMAGE_LAYOUT_PREINITIALIZED: - return "VK_IMAGE_LAYOUT_PREINITIALIZED"; - case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: - return "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR"; - case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: - return "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL"; - case VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV: - return "VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV"; - case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR: - return "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR"; - case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL: - return "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL"; - case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL: - return "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL"; - case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: - return "VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL"; - case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: - return "VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL"; - case VK_IMAGE_LAYOUT_UNDEFINED: - return "VK_IMAGE_LAYOUT_UNDEFINED"; + case VK_COMPONENT_SWIZZLE_A: + return "VK_COMPONENT_SWIZZLE_A"; + case VK_COMPONENT_SWIZZLE_B: + return "VK_COMPONENT_SWIZZLE_B"; + case VK_COMPONENT_SWIZZLE_G: + return "VK_COMPONENT_SWIZZLE_G"; + case VK_COMPONENT_SWIZZLE_IDENTITY: + return "VK_COMPONENT_SWIZZLE_IDENTITY"; + case VK_COMPONENT_SWIZZLE_ONE: + return "VK_COMPONENT_SWIZZLE_ONE"; + case VK_COMPONENT_SWIZZLE_R: + return "VK_COMPONENT_SWIZZLE_R"; + case VK_COMPONENT_SWIZZLE_ZERO: + return "VK_COMPONENT_SWIZZLE_ZERO"; default: - return "Unhandled VkImageLayout"; + return "Unhandled VkComponentSwizzle"; } } @@ -2314,6 +2658,8 @@ static inline const char* string_VkImageViewCreateFlagBits(VkImageViewCreateFlag { switch ((VkImageViewCreateFlagBits)input_value) { + case VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT: + return "VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT"; case VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT: return "VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT"; default: @@ -2360,354 +2706,33 @@ static inline const char* string_VkImageViewType(VkImageViewType input_value) } } -static inline const char* string_VkComponentSwizzle(VkComponentSwizzle input_value) +static inline const char* string_VkPipelineCacheCreateFlagBits(VkPipelineCacheCreateFlagBits input_value) { - switch ((VkComponentSwizzle)input_value) + switch ((VkPipelineCacheCreateFlagBits)input_value) { - case VK_COMPONENT_SWIZZLE_A: - return "VK_COMPONENT_SWIZZLE_A"; - case VK_COMPONENT_SWIZZLE_B: - return "VK_COMPONENT_SWIZZLE_B"; - case VK_COMPONENT_SWIZZLE_G: - return 
"VK_COMPONENT_SWIZZLE_G"; - case VK_COMPONENT_SWIZZLE_IDENTITY: - return "VK_COMPONENT_SWIZZLE_IDENTITY"; - case VK_COMPONENT_SWIZZLE_ONE: - return "VK_COMPONENT_SWIZZLE_ONE"; - case VK_COMPONENT_SWIZZLE_R: - return "VK_COMPONENT_SWIZZLE_R"; - case VK_COMPONENT_SWIZZLE_ZERO: - return "VK_COMPONENT_SWIZZLE_ZERO"; + case VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT: + return "VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT"; default: - return "Unhandled VkComponentSwizzle"; + return "Unhandled VkPipelineCacheCreateFlagBits"; } } -static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBits input_value) -{ - switch ((VkPipelineCreateFlagBits)input_value) - { - case VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT: - return "VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT"; - case VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR: - return "VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR"; - case VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR: - return "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR"; - case VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV: - return "VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV"; - case VK_PIPELINE_CREATE_DERIVATIVE_BIT: - return "VK_PIPELINE_CREATE_DERIVATIVE_BIT"; - case VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT: - return "VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT"; - case VK_PIPELINE_CREATE_DISPATCH_BASE_BIT: - return "VK_PIPELINE_CREATE_DISPATCH_BASE_BIT"; - case VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT: - return "VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT"; - default: - return "Unhandled VkPipelineCreateFlagBits"; - } -} - -static inline std::string string_VkPipelineCreateFlags(VkPipelineCreateFlags input_value) +static inline std::string string_VkPipelineCacheCreateFlags(VkPipelineCacheCreateFlags input_value) { std::string ret; int index = 0; while(input_value) { if (input_value & 1) { if( !ret.empty()) ret.append("|"); - ret.append(string_VkPipelineCreateFlagBits(static_cast(1 << index))); + ret.append(string_VkPipelineCacheCreateFlagBits(static_cast(1 << index))); } ++index; input_value >>= 1; } - if( ret.empty()) ret.append(string_VkPipelineCreateFlagBits(static_cast(0))); + if( ret.empty()) ret.append(string_VkPipelineCacheCreateFlagBits(static_cast(0))); return ret; } -static inline const char* string_VkPipelineShaderStageCreateFlagBits(VkPipelineShaderStageCreateFlagBits input_value) -{ - switch ((VkPipelineShaderStageCreateFlagBits)input_value) - { - case VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT: - return "VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT"; - case VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT: - return "VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT"; - default: - return "Unhandled VkPipelineShaderStageCreateFlagBits"; - } -} - -static inline std::string string_VkPipelineShaderStageCreateFlags(VkPipelineShaderStageCreateFlags input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkPipelineShaderStageCreateFlagBits(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkPipelineShaderStageCreateFlagBits(static_cast(0))); - return ret; -} - -static inline const char* string_VkShaderStageFlagBits(VkShaderStageFlagBits input_value) -{ - switch ((VkShaderStageFlagBits)input_value) - { - case VK_SHADER_STAGE_ALL: - return "VK_SHADER_STAGE_ALL"; - case 
VK_SHADER_STAGE_ALL_GRAPHICS: - return "VK_SHADER_STAGE_ALL_GRAPHICS"; - case VK_SHADER_STAGE_ANY_HIT_BIT_NV: - return "VK_SHADER_STAGE_ANY_HIT_BIT_NV"; - case VK_SHADER_STAGE_CALLABLE_BIT_NV: - return "VK_SHADER_STAGE_CALLABLE_BIT_NV"; - case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV: - return "VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV"; - case VK_SHADER_STAGE_COMPUTE_BIT: - return "VK_SHADER_STAGE_COMPUTE_BIT"; - case VK_SHADER_STAGE_FRAGMENT_BIT: - return "VK_SHADER_STAGE_FRAGMENT_BIT"; - case VK_SHADER_STAGE_GEOMETRY_BIT: - return "VK_SHADER_STAGE_GEOMETRY_BIT"; - case VK_SHADER_STAGE_INTERSECTION_BIT_NV: - return "VK_SHADER_STAGE_INTERSECTION_BIT_NV"; - case VK_SHADER_STAGE_MESH_BIT_NV: - return "VK_SHADER_STAGE_MESH_BIT_NV"; - case VK_SHADER_STAGE_MISS_BIT_NV: - return "VK_SHADER_STAGE_MISS_BIT_NV"; - case VK_SHADER_STAGE_RAYGEN_BIT_NV: - return "VK_SHADER_STAGE_RAYGEN_BIT_NV"; - case VK_SHADER_STAGE_TASK_BIT_NV: - return "VK_SHADER_STAGE_TASK_BIT_NV"; - case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT: - return "VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT"; - case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: - return "VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT"; - case VK_SHADER_STAGE_VERTEX_BIT: - return "VK_SHADER_STAGE_VERTEX_BIT"; - default: - return "Unhandled VkShaderStageFlagBits"; - } -} - -static inline std::string string_VkShaderStageFlags(VkShaderStageFlags input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkShaderStageFlagBits(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkShaderStageFlagBits(static_cast(0))); - return ret; -} - -static inline const char* string_VkVertexInputRate(VkVertexInputRate input_value) -{ - switch ((VkVertexInputRate)input_value) - { - case VK_VERTEX_INPUT_RATE_INSTANCE: - return "VK_VERTEX_INPUT_RATE_INSTANCE"; - case VK_VERTEX_INPUT_RATE_VERTEX: - return "VK_VERTEX_INPUT_RATE_VERTEX"; - default: - return "Unhandled VkVertexInputRate"; - } -} - -static inline const char* string_VkPrimitiveTopology(VkPrimitiveTopology input_value) -{ - switch ((VkPrimitiveTopology)input_value) - { - case VK_PRIMITIVE_TOPOLOGY_LINE_LIST: - return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST"; - case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY: - return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY"; - case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP: - return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP"; - case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY: - return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY"; - case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST: - return "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST"; - case VK_PRIMITIVE_TOPOLOGY_POINT_LIST: - return "VK_PRIMITIVE_TOPOLOGY_POINT_LIST"; - case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN: - return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN"; - case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST: - return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST"; - case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY: - return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY"; - case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP: - return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP"; - case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY: - return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY"; - default: - return "Unhandled VkPrimitiveTopology"; - } -} - -static inline const char* string_VkPolygonMode(VkPolygonMode input_value) -{ - switch ((VkPolygonMode)input_value) - { - case VK_POLYGON_MODE_FILL: - return "VK_POLYGON_MODE_FILL"; - case 
VK_POLYGON_MODE_FILL_RECTANGLE_NV: - return "VK_POLYGON_MODE_FILL_RECTANGLE_NV"; - case VK_POLYGON_MODE_LINE: - return "VK_POLYGON_MODE_LINE"; - case VK_POLYGON_MODE_POINT: - return "VK_POLYGON_MODE_POINT"; - default: - return "Unhandled VkPolygonMode"; - } -} - -static inline const char* string_VkCullModeFlagBits(VkCullModeFlagBits input_value) -{ - switch ((VkCullModeFlagBits)input_value) - { - case VK_CULL_MODE_BACK_BIT: - return "VK_CULL_MODE_BACK_BIT"; - case VK_CULL_MODE_FRONT_AND_BACK: - return "VK_CULL_MODE_FRONT_AND_BACK"; - case VK_CULL_MODE_FRONT_BIT: - return "VK_CULL_MODE_FRONT_BIT"; - case VK_CULL_MODE_NONE: - return "VK_CULL_MODE_NONE"; - default: - return "Unhandled VkCullModeFlagBits"; - } -} - -static inline std::string string_VkCullModeFlags(VkCullModeFlags input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkCullModeFlagBits(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkCullModeFlagBits(static_cast(0))); - return ret; -} - -static inline const char* string_VkFrontFace(VkFrontFace input_value) -{ - switch ((VkFrontFace)input_value) - { - case VK_FRONT_FACE_CLOCKWISE: - return "VK_FRONT_FACE_CLOCKWISE"; - case VK_FRONT_FACE_COUNTER_CLOCKWISE: - return "VK_FRONT_FACE_COUNTER_CLOCKWISE"; - default: - return "Unhandled VkFrontFace"; - } -} - -static inline const char* string_VkCompareOp(VkCompareOp input_value) -{ - switch ((VkCompareOp)input_value) - { - case VK_COMPARE_OP_ALWAYS: - return "VK_COMPARE_OP_ALWAYS"; - case VK_COMPARE_OP_EQUAL: - return "VK_COMPARE_OP_EQUAL"; - case VK_COMPARE_OP_GREATER: - return "VK_COMPARE_OP_GREATER"; - case VK_COMPARE_OP_GREATER_OR_EQUAL: - return "VK_COMPARE_OP_GREATER_OR_EQUAL"; - case VK_COMPARE_OP_LESS: - return "VK_COMPARE_OP_LESS"; - case VK_COMPARE_OP_LESS_OR_EQUAL: - return "VK_COMPARE_OP_LESS_OR_EQUAL"; - case VK_COMPARE_OP_NEVER: - return "VK_COMPARE_OP_NEVER"; - case VK_COMPARE_OP_NOT_EQUAL: - return "VK_COMPARE_OP_NOT_EQUAL"; - default: - return "Unhandled VkCompareOp"; - } -} - -static inline const char* string_VkStencilOp(VkStencilOp input_value) -{ - switch ((VkStencilOp)input_value) - { - case VK_STENCIL_OP_DECREMENT_AND_CLAMP: - return "VK_STENCIL_OP_DECREMENT_AND_CLAMP"; - case VK_STENCIL_OP_DECREMENT_AND_WRAP: - return "VK_STENCIL_OP_DECREMENT_AND_WRAP"; - case VK_STENCIL_OP_INCREMENT_AND_CLAMP: - return "VK_STENCIL_OP_INCREMENT_AND_CLAMP"; - case VK_STENCIL_OP_INCREMENT_AND_WRAP: - return "VK_STENCIL_OP_INCREMENT_AND_WRAP"; - case VK_STENCIL_OP_INVERT: - return "VK_STENCIL_OP_INVERT"; - case VK_STENCIL_OP_KEEP: - return "VK_STENCIL_OP_KEEP"; - case VK_STENCIL_OP_REPLACE: - return "VK_STENCIL_OP_REPLACE"; - case VK_STENCIL_OP_ZERO: - return "VK_STENCIL_OP_ZERO"; - default: - return "Unhandled VkStencilOp"; - } -} - -static inline const char* string_VkLogicOp(VkLogicOp input_value) -{ - switch ((VkLogicOp)input_value) - { - case VK_LOGIC_OP_AND: - return "VK_LOGIC_OP_AND"; - case VK_LOGIC_OP_AND_INVERTED: - return "VK_LOGIC_OP_AND_INVERTED"; - case VK_LOGIC_OP_AND_REVERSE: - return "VK_LOGIC_OP_AND_REVERSE"; - case VK_LOGIC_OP_CLEAR: - return "VK_LOGIC_OP_CLEAR"; - case VK_LOGIC_OP_COPY: - return "VK_LOGIC_OP_COPY"; - case VK_LOGIC_OP_COPY_INVERTED: - return "VK_LOGIC_OP_COPY_INVERTED"; - case VK_LOGIC_OP_EQUIVALENT: - return "VK_LOGIC_OP_EQUIVALENT"; - case VK_LOGIC_OP_INVERT: - return "VK_LOGIC_OP_INVERT"; - case VK_LOGIC_OP_NAND: - return 
"VK_LOGIC_OP_NAND"; - case VK_LOGIC_OP_NOR: - return "VK_LOGIC_OP_NOR"; - case VK_LOGIC_OP_NO_OP: - return "VK_LOGIC_OP_NO_OP"; - case VK_LOGIC_OP_OR: - return "VK_LOGIC_OP_OR"; - case VK_LOGIC_OP_OR_INVERTED: - return "VK_LOGIC_OP_OR_INVERTED"; - case VK_LOGIC_OP_OR_REVERSE: - return "VK_LOGIC_OP_OR_REVERSE"; - case VK_LOGIC_OP_SET: - return "VK_LOGIC_OP_SET"; - case VK_LOGIC_OP_XOR: - return "VK_LOGIC_OP_XOR"; - default: - return "Unhandled VkLogicOp"; - } -} - static inline const char* string_VkBlendFactor(VkBlendFactor input_value) { switch ((VkBlendFactor)input_value) @@ -2899,40 +2924,269 @@ static inline std::string string_VkColorComponentFlags(VkColorComponentFlags inp return ret; } +static inline const char* string_VkCompareOp(VkCompareOp input_value) +{ + switch ((VkCompareOp)input_value) + { + case VK_COMPARE_OP_ALWAYS: + return "VK_COMPARE_OP_ALWAYS"; + case VK_COMPARE_OP_EQUAL: + return "VK_COMPARE_OP_EQUAL"; + case VK_COMPARE_OP_GREATER: + return "VK_COMPARE_OP_GREATER"; + case VK_COMPARE_OP_GREATER_OR_EQUAL: + return "VK_COMPARE_OP_GREATER_OR_EQUAL"; + case VK_COMPARE_OP_LESS: + return "VK_COMPARE_OP_LESS"; + case VK_COMPARE_OP_LESS_OR_EQUAL: + return "VK_COMPARE_OP_LESS_OR_EQUAL"; + case VK_COMPARE_OP_NEVER: + return "VK_COMPARE_OP_NEVER"; + case VK_COMPARE_OP_NOT_EQUAL: + return "VK_COMPARE_OP_NOT_EQUAL"; + default: + return "Unhandled VkCompareOp"; + } +} + +static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBits input_value) +{ + switch ((VkPipelineCreateFlagBits)input_value) + { + case VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT: + return "VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT"; + case VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR: + return "VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR"; + case VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR: + return "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR"; + case VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV: + return "VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV"; + case VK_PIPELINE_CREATE_DERIVATIVE_BIT: + return "VK_PIPELINE_CREATE_DERIVATIVE_BIT"; + case VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT: + return "VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT"; + case VK_PIPELINE_CREATE_DISPATCH_BASE_BIT: + return "VK_PIPELINE_CREATE_DISPATCH_BASE_BIT"; + case VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT: + return "VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT"; + case VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT: + return "VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT"; + case VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV: + return "VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV"; + case VK_PIPELINE_CREATE_LIBRARY_BIT_KHR: + return "VK_PIPELINE_CREATE_LIBRARY_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR: + return 
"VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR"; + case VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT: + return "VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT"; + default: + return "Unhandled VkPipelineCreateFlagBits"; + } +} + +static inline std::string string_VkPipelineCreateFlags(VkPipelineCreateFlags input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineCreateFlagBits(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkPipelineCreateFlagBits(static_cast(0))); + return ret; +} + +static inline const char* string_VkPipelineShaderStageCreateFlagBits(VkPipelineShaderStageCreateFlagBits input_value) +{ + switch ((VkPipelineShaderStageCreateFlagBits)input_value) + { + case VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT: + return "VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT"; + case VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT: + return "VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT"; + default: + return "Unhandled VkPipelineShaderStageCreateFlagBits"; + } +} + +static inline std::string string_VkPipelineShaderStageCreateFlags(VkPipelineShaderStageCreateFlags input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineShaderStageCreateFlagBits(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkPipelineShaderStageCreateFlagBits(static_cast(0))); + return ret; +} + +static inline const char* string_VkShaderStageFlagBits(VkShaderStageFlagBits input_value) +{ + switch ((VkShaderStageFlagBits)input_value) + { + case VK_SHADER_STAGE_ALL: + return "VK_SHADER_STAGE_ALL"; + case VK_SHADER_STAGE_ALL_GRAPHICS: + return "VK_SHADER_STAGE_ALL_GRAPHICS"; + case VK_SHADER_STAGE_ANY_HIT_BIT_KHR: + return "VK_SHADER_STAGE_ANY_HIT_BIT_KHR"; + case VK_SHADER_STAGE_CALLABLE_BIT_KHR: + return "VK_SHADER_STAGE_CALLABLE_BIT_KHR"; + case VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR: + return "VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR"; + case VK_SHADER_STAGE_COMPUTE_BIT: + return "VK_SHADER_STAGE_COMPUTE_BIT"; + case VK_SHADER_STAGE_FRAGMENT_BIT: + return "VK_SHADER_STAGE_FRAGMENT_BIT"; + case VK_SHADER_STAGE_GEOMETRY_BIT: + return "VK_SHADER_STAGE_GEOMETRY_BIT"; + case VK_SHADER_STAGE_INTERSECTION_BIT_KHR: + return "VK_SHADER_STAGE_INTERSECTION_BIT_KHR"; + case VK_SHADER_STAGE_MESH_BIT_NV: + return "VK_SHADER_STAGE_MESH_BIT_NV"; + case VK_SHADER_STAGE_MISS_BIT_KHR: + return "VK_SHADER_STAGE_MISS_BIT_KHR"; + case VK_SHADER_STAGE_RAYGEN_BIT_KHR: + return "VK_SHADER_STAGE_RAYGEN_BIT_KHR"; + case VK_SHADER_STAGE_TASK_BIT_NV: + return "VK_SHADER_STAGE_TASK_BIT_NV"; + case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT: + return "VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT"; + case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: + return "VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT"; + case VK_SHADER_STAGE_VERTEX_BIT: + return "VK_SHADER_STAGE_VERTEX_BIT"; + default: + return "Unhandled VkShaderStageFlagBits"; + } +} + +static inline std::string string_VkShaderStageFlags(VkShaderStageFlags input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkShaderStageFlagBits(static_cast(1 << index))); + } + ++index; + input_value >>= 1; 
+ } + if( ret.empty()) ret.append(string_VkShaderStageFlagBits(static_cast(0))); + return ret; +} + +static inline const char* string_VkCullModeFlagBits(VkCullModeFlagBits input_value) +{ + switch ((VkCullModeFlagBits)input_value) + { + case VK_CULL_MODE_BACK_BIT: + return "VK_CULL_MODE_BACK_BIT"; + case VK_CULL_MODE_FRONT_AND_BACK: + return "VK_CULL_MODE_FRONT_AND_BACK"; + case VK_CULL_MODE_FRONT_BIT: + return "VK_CULL_MODE_FRONT_BIT"; + case VK_CULL_MODE_NONE: + return "VK_CULL_MODE_NONE"; + default: + return "Unhandled VkCullModeFlagBits"; + } +} + +static inline std::string string_VkCullModeFlags(VkCullModeFlags input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCullModeFlagBits(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkCullModeFlagBits(static_cast(0))); + return ret; +} + static inline const char* string_VkDynamicState(VkDynamicState input_value) { switch ((VkDynamicState)input_value) { case VK_DYNAMIC_STATE_BLEND_CONSTANTS: return "VK_DYNAMIC_STATE_BLEND_CONSTANTS"; + case VK_DYNAMIC_STATE_CULL_MODE_EXT: + return "VK_DYNAMIC_STATE_CULL_MODE_EXT"; case VK_DYNAMIC_STATE_DEPTH_BIAS: return "VK_DYNAMIC_STATE_DEPTH_BIAS"; case VK_DYNAMIC_STATE_DEPTH_BOUNDS: return "VK_DYNAMIC_STATE_DEPTH_BOUNDS"; + case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT: + return "VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT"; + case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT: + return "VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT"; + case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT: + return "VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT"; + case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT: + return "VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT"; case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT: return "VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT"; case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV: return "VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV"; + case VK_DYNAMIC_STATE_FRONT_FACE_EXT: + return "VK_DYNAMIC_STATE_FRONT_FACE_EXT"; case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT: return "VK_DYNAMIC_STATE_LINE_STIPPLE_EXT"; case VK_DYNAMIC_STATE_LINE_WIDTH: return "VK_DYNAMIC_STATE_LINE_WIDTH"; + case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT: + return "VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT"; case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT: return "VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT"; case VK_DYNAMIC_STATE_SCISSOR: return "VK_DYNAMIC_STATE_SCISSOR"; + case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT: + return "VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT"; case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK: return "VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK"; + case VK_DYNAMIC_STATE_STENCIL_OP_EXT: + return "VK_DYNAMIC_STATE_STENCIL_OP_EXT"; case VK_DYNAMIC_STATE_STENCIL_REFERENCE: return "VK_DYNAMIC_STATE_STENCIL_REFERENCE"; + case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT: + return "VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT"; case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK: return "VK_DYNAMIC_STATE_STENCIL_WRITE_MASK"; + case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT: + return "VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT"; case VK_DYNAMIC_STATE_VIEWPORT: return "VK_DYNAMIC_STATE_VIEWPORT"; case VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV: return "VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV"; case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV: return "VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV"; + case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT: + return "VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT"; case 
VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV: return "VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV"; default: @@ -2940,6 +3194,205 @@ static inline const char* string_VkDynamicState(VkDynamicState input_value) } } +static inline const char* string_VkFrontFace(VkFrontFace input_value) +{ + switch ((VkFrontFace)input_value) + { + case VK_FRONT_FACE_CLOCKWISE: + return "VK_FRONT_FACE_CLOCKWISE"; + case VK_FRONT_FACE_COUNTER_CLOCKWISE: + return "VK_FRONT_FACE_COUNTER_CLOCKWISE"; + default: + return "Unhandled VkFrontFace"; + } +} + +static inline const char* string_VkVertexInputRate(VkVertexInputRate input_value) +{ + switch ((VkVertexInputRate)input_value) + { + case VK_VERTEX_INPUT_RATE_INSTANCE: + return "VK_VERTEX_INPUT_RATE_INSTANCE"; + case VK_VERTEX_INPUT_RATE_VERTEX: + return "VK_VERTEX_INPUT_RATE_VERTEX"; + default: + return "Unhandled VkVertexInputRate"; + } +} + +static inline const char* string_VkPrimitiveTopology(VkPrimitiveTopology input_value) +{ + switch ((VkPrimitiveTopology)input_value) + { + case VK_PRIMITIVE_TOPOLOGY_LINE_LIST: + return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST"; + case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY: + return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY"; + case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP: + return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP"; + case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY: + return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY"; + case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST: + return "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST"; + case VK_PRIMITIVE_TOPOLOGY_POINT_LIST: + return "VK_PRIMITIVE_TOPOLOGY_POINT_LIST"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY"; + default: + return "Unhandled VkPrimitiveTopology"; + } +} + +static inline const char* string_VkPolygonMode(VkPolygonMode input_value) +{ + switch ((VkPolygonMode)input_value) + { + case VK_POLYGON_MODE_FILL: + return "VK_POLYGON_MODE_FILL"; + case VK_POLYGON_MODE_FILL_RECTANGLE_NV: + return "VK_POLYGON_MODE_FILL_RECTANGLE_NV"; + case VK_POLYGON_MODE_LINE: + return "VK_POLYGON_MODE_LINE"; + case VK_POLYGON_MODE_POINT: + return "VK_POLYGON_MODE_POINT"; + default: + return "Unhandled VkPolygonMode"; + } +} + +static inline const char* string_VkStencilOp(VkStencilOp input_value) +{ + switch ((VkStencilOp)input_value) + { + case VK_STENCIL_OP_DECREMENT_AND_CLAMP: + return "VK_STENCIL_OP_DECREMENT_AND_CLAMP"; + case VK_STENCIL_OP_DECREMENT_AND_WRAP: + return "VK_STENCIL_OP_DECREMENT_AND_WRAP"; + case VK_STENCIL_OP_INCREMENT_AND_CLAMP: + return "VK_STENCIL_OP_INCREMENT_AND_CLAMP"; + case VK_STENCIL_OP_INCREMENT_AND_WRAP: + return "VK_STENCIL_OP_INCREMENT_AND_WRAP"; + case VK_STENCIL_OP_INVERT: + return "VK_STENCIL_OP_INVERT"; + case VK_STENCIL_OP_KEEP: + return "VK_STENCIL_OP_KEEP"; + case VK_STENCIL_OP_REPLACE: + return "VK_STENCIL_OP_REPLACE"; + case VK_STENCIL_OP_ZERO: + return "VK_STENCIL_OP_ZERO"; + default: + return "Unhandled VkStencilOp"; + } +} + +static inline const char* string_VkLogicOp(VkLogicOp input_value) +{ + switch ((VkLogicOp)input_value) + { + case VK_LOGIC_OP_AND: + return "VK_LOGIC_OP_AND"; + case 
VK_LOGIC_OP_AND_INVERTED: + return "VK_LOGIC_OP_AND_INVERTED"; + case VK_LOGIC_OP_AND_REVERSE: + return "VK_LOGIC_OP_AND_REVERSE"; + case VK_LOGIC_OP_CLEAR: + return "VK_LOGIC_OP_CLEAR"; + case VK_LOGIC_OP_COPY: + return "VK_LOGIC_OP_COPY"; + case VK_LOGIC_OP_COPY_INVERTED: + return "VK_LOGIC_OP_COPY_INVERTED"; + case VK_LOGIC_OP_EQUIVALENT: + return "VK_LOGIC_OP_EQUIVALENT"; + case VK_LOGIC_OP_INVERT: + return "VK_LOGIC_OP_INVERT"; + case VK_LOGIC_OP_NAND: + return "VK_LOGIC_OP_NAND"; + case VK_LOGIC_OP_NOR: + return "VK_LOGIC_OP_NOR"; + case VK_LOGIC_OP_NO_OP: + return "VK_LOGIC_OP_NO_OP"; + case VK_LOGIC_OP_OR: + return "VK_LOGIC_OP_OR"; + case VK_LOGIC_OP_OR_INVERTED: + return "VK_LOGIC_OP_OR_INVERTED"; + case VK_LOGIC_OP_OR_REVERSE: + return "VK_LOGIC_OP_OR_REVERSE"; + case VK_LOGIC_OP_SET: + return "VK_LOGIC_OP_SET"; + case VK_LOGIC_OP_XOR: + return "VK_LOGIC_OP_XOR"; + default: + return "Unhandled VkLogicOp"; + } +} + +static inline const char* string_VkBorderColor(VkBorderColor input_value) +{ + switch ((VkBorderColor)input_value) + { + case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT: + return "VK_BORDER_COLOR_FLOAT_CUSTOM_EXT"; + case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK: + return "VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK"; + case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE: + return "VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE"; + case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK: + return "VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK"; + case VK_BORDER_COLOR_INT_CUSTOM_EXT: + return "VK_BORDER_COLOR_INT_CUSTOM_EXT"; + case VK_BORDER_COLOR_INT_OPAQUE_BLACK: + return "VK_BORDER_COLOR_INT_OPAQUE_BLACK"; + case VK_BORDER_COLOR_INT_OPAQUE_WHITE: + return "VK_BORDER_COLOR_INT_OPAQUE_WHITE"; + case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK: + return "VK_BORDER_COLOR_INT_TRANSPARENT_BLACK"; + default: + return "Unhandled VkBorderColor"; + } +} + +static inline const char* string_VkFilter(VkFilter input_value) +{ + switch ((VkFilter)input_value) + { + case VK_FILTER_CUBIC_IMG: + return "VK_FILTER_CUBIC_IMG"; + case VK_FILTER_LINEAR: + return "VK_FILTER_LINEAR"; + case VK_FILTER_NEAREST: + return "VK_FILTER_NEAREST"; + default: + return "Unhandled VkFilter"; + } +} + +static inline const char* string_VkSamplerAddressMode(VkSamplerAddressMode input_value) +{ + switch ((VkSamplerAddressMode)input_value) + { + case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER: + return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER"; + case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE: + return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE"; + case VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT: + return "VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT"; + case VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE: + return "VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE"; + case VK_SAMPLER_ADDRESS_MODE_REPEAT: + return "VK_SAMPLER_ADDRESS_MODE_REPEAT"; + default: + return "Unhandled VkSamplerAddressMode"; + } +} + static inline const char* string_VkSamplerCreateFlagBits(VkSamplerCreateFlagBits input_value) { switch ((VkSamplerCreateFlagBits)input_value) @@ -2969,21 +3422,6 @@ static inline std::string string_VkSamplerCreateFlags(VkSamplerCreateFlags input return ret; } -static inline const char* string_VkFilter(VkFilter input_value) -{ - switch ((VkFilter)input_value) - { - case VK_FILTER_CUBIC_IMG: - return "VK_FILTER_CUBIC_IMG"; - case VK_FILTER_LINEAR: - return "VK_FILTER_LINEAR"; - case VK_FILTER_NEAREST: - return "VK_FILTER_NEAREST"; - default: - return "Unhandled VkFilter"; - } -} - static inline const char* string_VkSamplerMipmapMode(VkSamplerMipmapMode input_value) { switch 
((VkSamplerMipmapMode)input_value) @@ -2997,110 +3435,6 @@ static inline const char* string_VkSamplerMipmapMode(VkSamplerMipmapMode input_v } } -static inline const char* string_VkSamplerAddressMode(VkSamplerAddressMode input_value) -{ - switch ((VkSamplerAddressMode)input_value) - { - case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER: - return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER"; - case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE: - return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE"; - case VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT: - return "VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT"; - case VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE: - return "VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE"; - case VK_SAMPLER_ADDRESS_MODE_REPEAT: - return "VK_SAMPLER_ADDRESS_MODE_REPEAT"; - default: - return "Unhandled VkSamplerAddressMode"; - } -} - -static inline const char* string_VkBorderColor(VkBorderColor input_value) -{ - switch ((VkBorderColor)input_value) - { - case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK: - return "VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK"; - case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE: - return "VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE"; - case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK: - return "VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK"; - case VK_BORDER_COLOR_INT_OPAQUE_BLACK: - return "VK_BORDER_COLOR_INT_OPAQUE_BLACK"; - case VK_BORDER_COLOR_INT_OPAQUE_WHITE: - return "VK_BORDER_COLOR_INT_OPAQUE_WHITE"; - case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK: - return "VK_BORDER_COLOR_INT_TRANSPARENT_BLACK"; - default: - return "Unhandled VkBorderColor"; - } -} - -static inline const char* string_VkDescriptorSetLayoutCreateFlagBits(VkDescriptorSetLayoutCreateFlagBits input_value) -{ - switch ((VkDescriptorSetLayoutCreateFlagBits)input_value) - { - case VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR: - return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR"; - case VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT: - return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT"; - default: - return "Unhandled VkDescriptorSetLayoutCreateFlagBits"; - } -} - -static inline std::string string_VkDescriptorSetLayoutCreateFlags(VkDescriptorSetLayoutCreateFlags input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkDescriptorSetLayoutCreateFlagBits(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkDescriptorSetLayoutCreateFlagBits(static_cast(0))); - return ret; -} - -static inline const char* string_VkDescriptorType(VkDescriptorType input_value) -{ - switch ((VkDescriptorType)input_value) - { - case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV: - return "VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV"; - case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: - return "VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER"; - case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: - return "VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT"; - case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: - return "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT"; - case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: - return "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE"; - case VK_DESCRIPTOR_TYPE_SAMPLER: - return "VK_DESCRIPTOR_TYPE_SAMPLER"; - case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: - return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER"; - case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: - return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC"; - case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: - return "VK_DESCRIPTOR_TYPE_STORAGE_IMAGE"; - case 
VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: - return "VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER"; - case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: - return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER"; - case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: - return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC"; - case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: - return "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER"; - default: - return "Unhandled VkDescriptorType"; - } -} - static inline const char* string_VkDescriptorPoolCreateFlagBits(VkDescriptorPoolCreateFlagBits input_value) { switch ((VkDescriptorPoolCreateFlagBits)input_value) @@ -3130,30 +3464,67 @@ static inline std::string string_VkDescriptorPoolCreateFlags(VkDescriptorPoolCre return ret; } -static inline const char* string_VkFramebufferCreateFlagBits(VkFramebufferCreateFlagBits input_value) +static inline const char* string_VkDescriptorType(VkDescriptorType input_value) { - switch ((VkFramebufferCreateFlagBits)input_value) + switch ((VkDescriptorType)input_value) { - case VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT: - return "VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT"; + case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: + return "VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR"; + case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: + return "VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER"; + case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: + return "VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT"; + case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: + return "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT"; + case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: + return "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE"; + case VK_DESCRIPTOR_TYPE_SAMPLER: + return "VK_DESCRIPTOR_TYPE_SAMPLER"; + case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: + return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER"; + case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: + return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC"; + case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: + return "VK_DESCRIPTOR_TYPE_STORAGE_IMAGE"; + case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: + return "VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER"; + case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: + return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER"; + case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: + return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC"; + case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: + return "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER"; default: - return "Unhandled VkFramebufferCreateFlagBits"; + return "Unhandled VkDescriptorType"; } } -static inline std::string string_VkFramebufferCreateFlags(VkFramebufferCreateFlags input_value) +static inline const char* string_VkDescriptorSetLayoutCreateFlagBits(VkDescriptorSetLayoutCreateFlagBits input_value) +{ + switch ((VkDescriptorSetLayoutCreateFlagBits)input_value) + { + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT"; + default: + return "Unhandled VkDescriptorSetLayoutCreateFlagBits"; + } +} + +static inline std::string string_VkDescriptorSetLayoutCreateFlags(VkDescriptorSetLayoutCreateFlags input_value) { std::string ret; int index = 0; while(input_value) { if (input_value & 1) { if( !ret.empty()) ret.append("|"); - ret.append(string_VkFramebufferCreateFlagBits(static_cast(1 << index))); + ret.append(string_VkDescriptorSetLayoutCreateFlagBits(static_cast(1 << index))); } ++index; input_value >>= 1; } - if( ret.empty()) 
ret.append(string_VkFramebufferCreateFlagBits(static_cast(0))); + if( ret.empty()) ret.append(string_VkDescriptorSetLayoutCreateFlagBits(static_cast(0))); return ret; } @@ -3205,6 +3576,8 @@ static inline const char* string_VkAttachmentStoreOp(VkAttachmentStoreOp input_v { case VK_ATTACHMENT_STORE_OP_DONT_CARE: return "VK_ATTACHMENT_STORE_OP_DONT_CARE"; + case VK_ATTACHMENT_STORE_OP_NONE_QCOM: + return "VK_ATTACHMENT_STORE_OP_NONE_QCOM"; case VK_ATTACHMENT_STORE_OP_STORE: return "VK_ATTACHMENT_STORE_OP_STORE"; default: @@ -3212,131 +3585,6 @@ static inline const char* string_VkAttachmentStoreOp(VkAttachmentStoreOp input_v } } -static inline const char* string_VkSubpassDescriptionFlagBits(VkSubpassDescriptionFlagBits input_value) -{ - switch ((VkSubpassDescriptionFlagBits)input_value) - { - case VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX: - return "VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX"; - case VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX: - return "VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX"; - default: - return "Unhandled VkSubpassDescriptionFlagBits"; - } -} - -static inline std::string string_VkSubpassDescriptionFlags(VkSubpassDescriptionFlags input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkSubpassDescriptionFlagBits(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkSubpassDescriptionFlagBits(static_cast(0))); - return ret; -} - -static inline const char* string_VkPipelineBindPoint(VkPipelineBindPoint input_value) -{ - switch ((VkPipelineBindPoint)input_value) - { - case VK_PIPELINE_BIND_POINT_COMPUTE: - return "VK_PIPELINE_BIND_POINT_COMPUTE"; - case VK_PIPELINE_BIND_POINT_GRAPHICS: - return "VK_PIPELINE_BIND_POINT_GRAPHICS"; - case VK_PIPELINE_BIND_POINT_RAY_TRACING_NV: - return "VK_PIPELINE_BIND_POINT_RAY_TRACING_NV"; - default: - return "Unhandled VkPipelineBindPoint"; - } -} - -static inline const char* string_VkAccessFlagBits(VkAccessFlagBits input_value) -{ - switch ((VkAccessFlagBits)input_value) - { - case VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV: - return "VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV"; - case VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV: - return "VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV"; - case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT: - return "VK_ACCESS_COLOR_ATTACHMENT_READ_BIT"; - case VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT: - return "VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT"; - case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT: - return "VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT"; - case VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX: - return "VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX"; - case VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX: - return "VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX"; - case VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT: - return "VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT"; - case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT: - return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT"; - case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT: - return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT"; - case VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT: - return "VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT"; - case VK_ACCESS_HOST_READ_BIT: - return "VK_ACCESS_HOST_READ_BIT"; - case VK_ACCESS_HOST_WRITE_BIT: - return "VK_ACCESS_HOST_WRITE_BIT"; - case VK_ACCESS_INDEX_READ_BIT: - return "VK_ACCESS_INDEX_READ_BIT"; - case 
VK_ACCESS_INDIRECT_COMMAND_READ_BIT: - return "VK_ACCESS_INDIRECT_COMMAND_READ_BIT"; - case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT: - return "VK_ACCESS_INPUT_ATTACHMENT_READ_BIT"; - case VK_ACCESS_MEMORY_READ_BIT: - return "VK_ACCESS_MEMORY_READ_BIT"; - case VK_ACCESS_MEMORY_WRITE_BIT: - return "VK_ACCESS_MEMORY_WRITE_BIT"; - case VK_ACCESS_SHADER_READ_BIT: - return "VK_ACCESS_SHADER_READ_BIT"; - case VK_ACCESS_SHADER_WRITE_BIT: - return "VK_ACCESS_SHADER_WRITE_BIT"; - case VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV: - return "VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV"; - case VK_ACCESS_TRANSFER_READ_BIT: - return "VK_ACCESS_TRANSFER_READ_BIT"; - case VK_ACCESS_TRANSFER_WRITE_BIT: - return "VK_ACCESS_TRANSFER_WRITE_BIT"; - case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT: - return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT"; - case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT: - return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT"; - case VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT: - return "VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT"; - case VK_ACCESS_UNIFORM_READ_BIT: - return "VK_ACCESS_UNIFORM_READ_BIT"; - case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT: - return "VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT"; - default: - return "Unhandled VkAccessFlagBits"; - } -} - -static inline std::string string_VkAccessFlags(VkAccessFlags input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkAccessFlagBits(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkAccessFlagBits(static_cast(0))); - return ret; -} - static inline const char* string_VkDependencyFlagBits(VkDependencyFlagBits input_value) { switch ((VkDependencyFlagBits)input_value) @@ -3368,6 +3616,108 @@ static inline std::string string_VkDependencyFlags(VkDependencyFlags input_value return ret; } +static inline const char* string_VkFramebufferCreateFlagBits(VkFramebufferCreateFlagBits input_value) +{ + switch ((VkFramebufferCreateFlagBits)input_value) + { + case VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT: + return "VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT"; + default: + return "Unhandled VkFramebufferCreateFlagBits"; + } +} + +static inline std::string string_VkFramebufferCreateFlags(VkFramebufferCreateFlags input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkFramebufferCreateFlagBits(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkFramebufferCreateFlagBits(static_cast(0))); + return ret; +} + +static inline const char* string_VkPipelineBindPoint(VkPipelineBindPoint input_value) +{ + switch ((VkPipelineBindPoint)input_value) + { + case VK_PIPELINE_BIND_POINT_COMPUTE: + return "VK_PIPELINE_BIND_POINT_COMPUTE"; + case VK_PIPELINE_BIND_POINT_GRAPHICS: + return "VK_PIPELINE_BIND_POINT_GRAPHICS"; + case VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR: + return "VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR"; + default: + return "Unhandled VkPipelineBindPoint"; + } +} + +static inline const char* string_VkRenderPassCreateFlagBits(VkRenderPassCreateFlagBits input_value) +{ + switch ((VkRenderPassCreateFlagBits)input_value) + { + case VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM: + return "VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM"; + default: + return "Unhandled VkRenderPassCreateFlagBits"; + } +} + +static inline std::string 
string_VkRenderPassCreateFlags(VkRenderPassCreateFlags input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkRenderPassCreateFlagBits(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkRenderPassCreateFlagBits(static_cast(0))); + return ret; +} + +static inline const char* string_VkSubpassDescriptionFlagBits(VkSubpassDescriptionFlagBits input_value) +{ + switch ((VkSubpassDescriptionFlagBits)input_value) + { + case VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM: + return "VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM"; + case VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX: + return "VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX"; + case VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX: + return "VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX"; + case VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM: + return "VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM"; + default: + return "Unhandled VkSubpassDescriptionFlagBits"; + } +} + +static inline std::string string_VkSubpassDescriptionFlags(VkSubpassDescriptionFlags input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSubpassDescriptionFlagBits(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkSubpassDescriptionFlagBits(static_cast(0))); + return ret; +} + static inline const char* string_VkCommandPoolCreateFlagBits(VkCommandPoolCreateFlagBits input_value) { switch ((VkCommandPoolCreateFlagBits)input_value) @@ -3524,6 +3874,23 @@ static inline std::string string_VkCommandBufferResetFlags(VkCommandBufferResetF return ret; } +static inline const char* string_VkIndexType(VkIndexType input_value) +{ + switch ((VkIndexType)input_value) + { + case VK_INDEX_TYPE_NONE_KHR: + return "VK_INDEX_TYPE_NONE_KHR"; + case VK_INDEX_TYPE_UINT16: + return "VK_INDEX_TYPE_UINT16"; + case VK_INDEX_TYPE_UINT32: + return "VK_INDEX_TYPE_UINT32"; + case VK_INDEX_TYPE_UINT8_EXT: + return "VK_INDEX_TYPE_UINT8_EXT"; + default: + return "Unhandled VkIndexType"; + } +} + static inline const char* string_VkStencilFaceFlagBits(VkStencilFaceFlagBits input_value) { switch ((VkStencilFaceFlagBits)input_value) @@ -3555,23 +3922,6 @@ static inline std::string string_VkStencilFaceFlags(VkStencilFaceFlags input_val return ret; } -static inline const char* string_VkIndexType(VkIndexType input_value) -{ - switch ((VkIndexType)input_value) - { - case VK_INDEX_TYPE_NONE_NV: - return "VK_INDEX_TYPE_NONE_NV"; - case VK_INDEX_TYPE_UINT16: - return "VK_INDEX_TYPE_UINT16"; - case VK_INDEX_TYPE_UINT32: - return "VK_INDEX_TYPE_UINT32"; - case VK_INDEX_TYPE_UINT8_EXT: - return "VK_INDEX_TYPE_UINT8_EXT"; - default: - return "Unhandled VkIndexType"; - } -} - static inline const char* string_VkSubpassContents(VkSubpassContents input_value) { switch ((VkSubpassContents)input_value) @@ -3585,108 +3935,6 @@ static inline const char* string_VkSubpassContents(VkSubpassContents input_value } } -static inline const char* string_VkObjectType(VkObjectType input_value) -{ - switch ((VkObjectType)input_value) - { - case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV: - return "VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV"; - case VK_OBJECT_TYPE_BUFFER: - return "VK_OBJECT_TYPE_BUFFER"; - case VK_OBJECT_TYPE_BUFFER_VIEW: - return "VK_OBJECT_TYPE_BUFFER_VIEW"; - case 
VK_OBJECT_TYPE_COMMAND_BUFFER: - return "VK_OBJECT_TYPE_COMMAND_BUFFER"; - case VK_OBJECT_TYPE_COMMAND_POOL: - return "VK_OBJECT_TYPE_COMMAND_POOL"; - case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT: - return "VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT"; - case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT: - return "VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT"; - case VK_OBJECT_TYPE_DESCRIPTOR_POOL: - return "VK_OBJECT_TYPE_DESCRIPTOR_POOL"; - case VK_OBJECT_TYPE_DESCRIPTOR_SET: - return "VK_OBJECT_TYPE_DESCRIPTOR_SET"; - case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT: - return "VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT"; - case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE: - return "VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE"; - case VK_OBJECT_TYPE_DEVICE: - return "VK_OBJECT_TYPE_DEVICE"; - case VK_OBJECT_TYPE_DEVICE_MEMORY: - return "VK_OBJECT_TYPE_DEVICE_MEMORY"; - case VK_OBJECT_TYPE_DISPLAY_KHR: - return "VK_OBJECT_TYPE_DISPLAY_KHR"; - case VK_OBJECT_TYPE_DISPLAY_MODE_KHR: - return "VK_OBJECT_TYPE_DISPLAY_MODE_KHR"; - case VK_OBJECT_TYPE_EVENT: - return "VK_OBJECT_TYPE_EVENT"; - case VK_OBJECT_TYPE_FENCE: - return "VK_OBJECT_TYPE_FENCE"; - case VK_OBJECT_TYPE_FRAMEBUFFER: - return "VK_OBJECT_TYPE_FRAMEBUFFER"; - case VK_OBJECT_TYPE_IMAGE: - return "VK_OBJECT_TYPE_IMAGE"; - case VK_OBJECT_TYPE_IMAGE_VIEW: - return "VK_OBJECT_TYPE_IMAGE_VIEW"; - case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX: - return "VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX"; - case VK_OBJECT_TYPE_INSTANCE: - return "VK_OBJECT_TYPE_INSTANCE"; - case VK_OBJECT_TYPE_OBJECT_TABLE_NVX: - return "VK_OBJECT_TYPE_OBJECT_TABLE_NVX"; - case VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL: - return "VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL"; - case VK_OBJECT_TYPE_PHYSICAL_DEVICE: - return "VK_OBJECT_TYPE_PHYSICAL_DEVICE"; - case VK_OBJECT_TYPE_PIPELINE: - return "VK_OBJECT_TYPE_PIPELINE"; - case VK_OBJECT_TYPE_PIPELINE_CACHE: - return "VK_OBJECT_TYPE_PIPELINE_CACHE"; - case VK_OBJECT_TYPE_PIPELINE_LAYOUT: - return "VK_OBJECT_TYPE_PIPELINE_LAYOUT"; - case VK_OBJECT_TYPE_QUERY_POOL: - return "VK_OBJECT_TYPE_QUERY_POOL"; - case VK_OBJECT_TYPE_QUEUE: - return "VK_OBJECT_TYPE_QUEUE"; - case VK_OBJECT_TYPE_RENDER_PASS: - return "VK_OBJECT_TYPE_RENDER_PASS"; - case VK_OBJECT_TYPE_SAMPLER: - return "VK_OBJECT_TYPE_SAMPLER"; - case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION: - return "VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION"; - case VK_OBJECT_TYPE_SEMAPHORE: - return "VK_OBJECT_TYPE_SEMAPHORE"; - case VK_OBJECT_TYPE_SHADER_MODULE: - return "VK_OBJECT_TYPE_SHADER_MODULE"; - case VK_OBJECT_TYPE_SURFACE_KHR: - return "VK_OBJECT_TYPE_SURFACE_KHR"; - case VK_OBJECT_TYPE_SWAPCHAIN_KHR: - return "VK_OBJECT_TYPE_SWAPCHAIN_KHR"; - case VK_OBJECT_TYPE_UNKNOWN: - return "VK_OBJECT_TYPE_UNKNOWN"; - case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT: - return "VK_OBJECT_TYPE_VALIDATION_CACHE_EXT"; - default: - return "Unhandled VkObjectType"; - } -} - -static inline const char* string_VkVendorId(VkVendorId input_value) -{ - switch ((VkVendorId)input_value) - { - case VK_VENDOR_ID_KAZAN: - return "VK_VENDOR_ID_KAZAN"; - case VK_VENDOR_ID_VIV: - return "VK_VENDOR_ID_VIV"; - case VK_VENDOR_ID_VSI: - return "VK_VENDOR_ID_VSI"; - default: - return "Unhandled VkVendorId"; - } -} - static inline const char* string_VkSubgroupFeatureFlagBits(VkSubgroupFeatureFlagBits input_value) { switch ((VkSubgroupFeatureFlagBits)input_value) @@ -4158,8 +4406,12 @@ static inline const char* string_VkDriverId(VkDriverId input_value) return "VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA"; case 
VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS: return "VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS"; + case VK_DRIVER_ID_MESA_LLVMPIPE: + return "VK_DRIVER_ID_MESA_LLVMPIPE"; case VK_DRIVER_ID_MESA_RADV: return "VK_DRIVER_ID_MESA_RADV"; + case VK_DRIVER_ID_MOLTENVK: + return "VK_DRIVER_ID_MOLTENVK"; case VK_DRIVER_ID_NVIDIA_PROPRIETARY: return "VK_DRIVER_ID_NVIDIA_PROPRIETARY"; case VK_DRIVER_ID_QUALCOMM_PROPRIETARY: @@ -4350,39 +4602,27 @@ static inline std::string string_VkSurfaceTransformFlagsKHR(VkSurfaceTransformFl return ret; } -static inline const char* string_VkCompositeAlphaFlagBitsKHR(VkCompositeAlphaFlagBitsKHR input_value) +static inline const char* string_VkPresentModeKHR(VkPresentModeKHR input_value) { - switch ((VkCompositeAlphaFlagBitsKHR)input_value) + switch ((VkPresentModeKHR)input_value) { - case VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR: - return "VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR"; - case VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR: - return "VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR"; - case VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR: - return "VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR"; - case VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR: - return "VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR"; + case VK_PRESENT_MODE_FIFO_KHR: + return "VK_PRESENT_MODE_FIFO_KHR"; + case VK_PRESENT_MODE_FIFO_RELAXED_KHR: + return "VK_PRESENT_MODE_FIFO_RELAXED_KHR"; + case VK_PRESENT_MODE_IMMEDIATE_KHR: + return "VK_PRESENT_MODE_IMMEDIATE_KHR"; + case VK_PRESENT_MODE_MAILBOX_KHR: + return "VK_PRESENT_MODE_MAILBOX_KHR"; + case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR: + return "VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR"; + case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR: + return "VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR"; default: - return "Unhandled VkCompositeAlphaFlagBitsKHR"; + return "Unhandled VkPresentModeKHR"; } } -static inline std::string string_VkCompositeAlphaFlagsKHR(VkCompositeAlphaFlagsKHR input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkCompositeAlphaFlagBitsKHR(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkCompositeAlphaFlagBitsKHR(static_cast(0))); - return ret; -} - static inline const char* string_VkColorSpaceKHR(VkColorSpaceKHR input_value) { switch ((VkColorSpaceKHR)input_value) @@ -4424,27 +4664,39 @@ static inline const char* string_VkColorSpaceKHR(VkColorSpaceKHR input_value) } } -static inline const char* string_VkPresentModeKHR(VkPresentModeKHR input_value) +static inline const char* string_VkCompositeAlphaFlagBitsKHR(VkCompositeAlphaFlagBitsKHR input_value) { - switch ((VkPresentModeKHR)input_value) + switch ((VkCompositeAlphaFlagBitsKHR)input_value) { - case VK_PRESENT_MODE_FIFO_KHR: - return "VK_PRESENT_MODE_FIFO_KHR"; - case VK_PRESENT_MODE_FIFO_RELAXED_KHR: - return "VK_PRESENT_MODE_FIFO_RELAXED_KHR"; - case VK_PRESENT_MODE_IMMEDIATE_KHR: - return "VK_PRESENT_MODE_IMMEDIATE_KHR"; - case VK_PRESENT_MODE_MAILBOX_KHR: - return "VK_PRESENT_MODE_MAILBOX_KHR"; - case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR: - return "VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR"; - case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR: - return "VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR"; + case VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR: + return "VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR"; + case VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR: + return "VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR"; + case VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR: + return 
"VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR"; + case VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR: + return "VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR"; default: - return "Unhandled VkPresentModeKHR"; + return "Unhandled VkCompositeAlphaFlagBitsKHR"; } } +static inline std::string string_VkCompositeAlphaFlagsKHR(VkCompositeAlphaFlagsKHR input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCompositeAlphaFlagBitsKHR(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkCompositeAlphaFlagBitsKHR(static_cast(0))); + return ret; +} + static inline const char* string_VkSwapchainCreateFlagBitsKHR(VkSwapchainCreateFlagBitsKHR input_value) { switch ((VkSwapchainCreateFlagBitsKHR)input_value) @@ -5066,8 +5318,12 @@ static inline const char* string_VkDriverIdKHR(VkDriverIdKHR input_value) return "VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA"; case VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS: return "VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS"; + case VK_DRIVER_ID_MESA_LLVMPIPE: + return "VK_DRIVER_ID_MESA_LLVMPIPE"; case VK_DRIVER_ID_MESA_RADV: return "VK_DRIVER_ID_MESA_RADV"; + case VK_DRIVER_ID_MOLTENVK: + return "VK_DRIVER_ID_MOLTENVK"; case VK_DRIVER_ID_NVIDIA_PROPRIETARY: return "VK_DRIVER_ID_NVIDIA_PROPRIETARY"; case VK_DRIVER_ID_QUALCOMM_PROPRIETARY: @@ -5184,12 +5440,47 @@ static inline const char* string_VkPipelineExecutableStatisticFormatKHR(VkPipeli } } +static inline const char* string_VkDebugReportFlagBitsEXT(VkDebugReportFlagBitsEXT input_value) +{ + switch ((VkDebugReportFlagBitsEXT)input_value) + { + case VK_DEBUG_REPORT_DEBUG_BIT_EXT: + return "VK_DEBUG_REPORT_DEBUG_BIT_EXT"; + case VK_DEBUG_REPORT_ERROR_BIT_EXT: + return "VK_DEBUG_REPORT_ERROR_BIT_EXT"; + case VK_DEBUG_REPORT_INFORMATION_BIT_EXT: + return "VK_DEBUG_REPORT_INFORMATION_BIT_EXT"; + case VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT: + return "VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT"; + case VK_DEBUG_REPORT_WARNING_BIT_EXT: + return "VK_DEBUG_REPORT_WARNING_BIT_EXT"; + default: + return "Unhandled VkDebugReportFlagBitsEXT"; + } +} + +static inline std::string string_VkDebugReportFlagsEXT(VkDebugReportFlagsEXT input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDebugReportFlagBitsEXT(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkDebugReportFlagBitsEXT(static_cast(0))); + return ret; +} + static inline const char* string_VkDebugReportObjectTypeEXT(VkDebugReportObjectTypeEXT input_value) { switch ((VkDebugReportObjectTypeEXT)input_value) { - case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT: - return "VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT"; case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT"; case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT: @@ -5226,12 +5517,8 @@ static inline const char* string_VkDebugReportObjectTypeEXT(VkDebugReportObjectT return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT"; case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT: return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT"; - case VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT: - return 
"VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT"; case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT: return "VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT"; - case VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT: - return "VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT"; case VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT: return "VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT"; case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT: @@ -5267,41 +5554,6 @@ static inline const char* string_VkDebugReportObjectTypeEXT(VkDebugReportObjectT } } -static inline const char* string_VkDebugReportFlagBitsEXT(VkDebugReportFlagBitsEXT input_value) -{ - switch ((VkDebugReportFlagBitsEXT)input_value) - { - case VK_DEBUG_REPORT_DEBUG_BIT_EXT: - return "VK_DEBUG_REPORT_DEBUG_BIT_EXT"; - case VK_DEBUG_REPORT_ERROR_BIT_EXT: - return "VK_DEBUG_REPORT_ERROR_BIT_EXT"; - case VK_DEBUG_REPORT_INFORMATION_BIT_EXT: - return "VK_DEBUG_REPORT_INFORMATION_BIT_EXT"; - case VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT: - return "VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT"; - case VK_DEBUG_REPORT_WARNING_BIT_EXT: - return "VK_DEBUG_REPORT_WARNING_BIT_EXT"; - default: - return "Unhandled VkDebugReportFlagBitsEXT"; - } -} - -static inline std::string string_VkDebugReportFlagsEXT(VkDebugReportFlagsEXT input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkDebugReportFlagBitsEXT(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkDebugReportFlagBitsEXT(static_cast(0))); - return ret; -} - static inline const char* string_VkRasterizationOrderAMD(VkRasterizationOrderAMD input_value) { switch ((VkRasterizationOrderAMD)input_value) @@ -5434,112 +5686,6 @@ static inline std::string string_VkConditionalRenderingFlagsEXT(VkConditionalRen return ret; } -static inline const char* string_VkIndirectCommandsLayoutUsageFlagBitsNVX(VkIndirectCommandsLayoutUsageFlagBitsNVX input_value) -{ - switch ((VkIndirectCommandsLayoutUsageFlagBitsNVX)input_value) - { - case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX: - return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX"; - case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX: - return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX"; - case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX: - return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX"; - case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX: - return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX"; - default: - return "Unhandled VkIndirectCommandsLayoutUsageFlagBitsNVX"; - } -} - -static inline std::string string_VkIndirectCommandsLayoutUsageFlagsNVX(VkIndirectCommandsLayoutUsageFlagsNVX input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsNVX(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsNVX(static_cast(0))); - return ret; -} - -static inline const char* string_VkObjectEntryUsageFlagBitsNVX(VkObjectEntryUsageFlagBitsNVX input_value) -{ - switch ((VkObjectEntryUsageFlagBitsNVX)input_value) - { - case VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX: - return "VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX"; - case VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX: - 
return "VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX"; - default: - return "Unhandled VkObjectEntryUsageFlagBitsNVX"; - } -} - -static inline std::string string_VkObjectEntryUsageFlagsNVX(VkObjectEntryUsageFlagsNVX input_value) -{ - std::string ret; - int index = 0; - while(input_value) { - if (input_value & 1) { - if( !ret.empty()) ret.append("|"); - ret.append(string_VkObjectEntryUsageFlagBitsNVX(static_cast(1 << index))); - } - ++index; - input_value >>= 1; - } - if( ret.empty()) ret.append(string_VkObjectEntryUsageFlagBitsNVX(static_cast(0))); - return ret; -} - -static inline const char* string_VkIndirectCommandsTokenTypeNVX(VkIndirectCommandsTokenTypeNVX input_value) -{ - switch ((VkIndirectCommandsTokenTypeNVX)input_value) - { - case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX: - return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX"; - case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX: - return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX"; - case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX: - return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX"; - case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX: - return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX"; - case VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX: - return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX"; - case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX: - return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX"; - case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX: - return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX"; - case VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX: - return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX"; - default: - return "Unhandled VkIndirectCommandsTokenTypeNVX"; - } -} - -static inline const char* string_VkObjectEntryTypeNVX(VkObjectEntryTypeNVX input_value) -{ - switch ((VkObjectEntryTypeNVX)input_value) - { - case VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX: - return "VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX"; - case VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX: - return "VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX"; - case VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX: - return "VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX"; - case VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX: - return "VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX"; - case VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX: - return "VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX"; - default: - return "Unhandled VkObjectEntryTypeNVX"; - } -} - static inline const char* string_VkSurfaceCounterFlagBitsEXT(VkSurfaceCounterFlagBitsEXT input_value) { switch ((VkSurfaceCounterFlagBitsEXT)input_value) @@ -5862,16 +6008,18 @@ static inline const char* string_VkCoarseSampleOrderTypeNV(VkCoarseSampleOrderTy } } -static inline const char* string_VkAccelerationStructureTypeNV(VkAccelerationStructureTypeNV input_value) +static inline const char* string_VkRayTracingShaderGroupTypeKHR(VkRayTracingShaderGroupTypeKHR input_value) { - switch ((VkAccelerationStructureTypeNV)input_value) + switch ((VkRayTracingShaderGroupTypeKHR)input_value) { - case VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV: - return "VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV"; - case VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV: - return "VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV"; + case VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR: + return "VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR"; + case VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR: + return "VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR"; + case VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR: + return 
"VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR"; default: - return "Unhandled VkAccelerationStructureTypeNV"; + return "Unhandled VkRayTracingShaderGroupTypeKHR"; } } @@ -5879,38 +6027,110 @@ static inline const char* string_VkRayTracingShaderGroupTypeNV(VkRayTracingShade { switch ((VkRayTracingShaderGroupTypeNV)input_value) { - case VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV: - return "VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV"; - case VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV: - return "VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV"; - case VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV: - return "VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV"; + case VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR: + return "VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR"; + case VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR: + return "VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR"; + case VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR: + return "VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR"; default: return "Unhandled VkRayTracingShaderGroupTypeNV"; } } +static inline const char* string_VkGeometryTypeKHR(VkGeometryTypeKHR input_value) +{ + switch ((VkGeometryTypeKHR)input_value) + { + case VK_GEOMETRY_TYPE_AABBS_KHR: + return "VK_GEOMETRY_TYPE_AABBS_KHR"; + case VK_GEOMETRY_TYPE_INSTANCES_KHR: + return "VK_GEOMETRY_TYPE_INSTANCES_KHR"; + case VK_GEOMETRY_TYPE_TRIANGLES_KHR: + return "VK_GEOMETRY_TYPE_TRIANGLES_KHR"; + default: + return "Unhandled VkGeometryTypeKHR"; + } +} + static inline const char* string_VkGeometryTypeNV(VkGeometryTypeNV input_value) { switch ((VkGeometryTypeNV)input_value) { - case VK_GEOMETRY_TYPE_AABBS_NV: - return "VK_GEOMETRY_TYPE_AABBS_NV"; - case VK_GEOMETRY_TYPE_TRIANGLES_NV: - return "VK_GEOMETRY_TYPE_TRIANGLES_NV"; + case VK_GEOMETRY_TYPE_AABBS_KHR: + return "VK_GEOMETRY_TYPE_AABBS_KHR"; + case VK_GEOMETRY_TYPE_INSTANCES_KHR: + return "VK_GEOMETRY_TYPE_INSTANCES_KHR"; + case VK_GEOMETRY_TYPE_TRIANGLES_KHR: + return "VK_GEOMETRY_TYPE_TRIANGLES_KHR"; default: return "Unhandled VkGeometryTypeNV"; } } +static inline const char* string_VkAccelerationStructureTypeKHR(VkAccelerationStructureTypeKHR input_value) +{ + switch ((VkAccelerationStructureTypeKHR)input_value) + { + case VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR: + return "VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR"; + case VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR: + return "VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR"; + default: + return "Unhandled VkAccelerationStructureTypeKHR"; + } +} + +static inline const char* string_VkAccelerationStructureTypeNV(VkAccelerationStructureTypeNV input_value) +{ + switch ((VkAccelerationStructureTypeNV)input_value) + { + case VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR: + return "VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR"; + case VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR: + return "VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR"; + default: + return "Unhandled VkAccelerationStructureTypeNV"; + } +} + +static inline const char* string_VkGeometryFlagBitsKHR(VkGeometryFlagBitsKHR input_value) +{ + switch ((VkGeometryFlagBitsKHR)input_value) + { + case VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR: + return "VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR"; + case VK_GEOMETRY_OPAQUE_BIT_KHR: + return "VK_GEOMETRY_OPAQUE_BIT_KHR"; + default: + return "Unhandled VkGeometryFlagBitsKHR"; + } +} + +static inline std::string 
string_VkGeometryFlagsKHR(VkGeometryFlagsKHR input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkGeometryFlagBitsKHR(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkGeometryFlagBitsKHR(static_cast(0))); + return ret; +} + static inline const char* string_VkGeometryFlagBitsNV(VkGeometryFlagBitsNV input_value) { switch ((VkGeometryFlagBitsNV)input_value) { - case VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV: - return "VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV"; - case VK_GEOMETRY_OPAQUE_BIT_NV: - return "VK_GEOMETRY_OPAQUE_BIT_NV"; + case VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR: + return "VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR"; + case VK_GEOMETRY_OPAQUE_BIT_KHR: + return "VK_GEOMETRY_OPAQUE_BIT_KHR"; default: return "Unhandled VkGeometryFlagBitsNV"; } @@ -5932,18 +6152,51 @@ static inline std::string string_VkGeometryFlagsNV(VkGeometryFlagsNV input_value return ret; } +static inline const char* string_VkGeometryInstanceFlagBitsKHR(VkGeometryInstanceFlagBitsKHR input_value) +{ + switch ((VkGeometryInstanceFlagBitsKHR)input_value) + { + case VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR"; + default: + return "Unhandled VkGeometryInstanceFlagBitsKHR"; + } +} + +static inline std::string string_VkGeometryInstanceFlagsKHR(VkGeometryInstanceFlagsKHR input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkGeometryInstanceFlagBitsKHR(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkGeometryInstanceFlagBitsKHR(static_cast(0))); + return ret; +} + static inline const char* string_VkGeometryInstanceFlagBitsNV(VkGeometryInstanceFlagBitsNV input_value) { switch ((VkGeometryInstanceFlagBitsNV)input_value) { - case VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV: - return "VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV"; - case VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV: - return "VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV"; - case VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV: - return "VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV"; - case VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV: - return "VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV"; + case VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR"; default: return "Unhandled VkGeometryInstanceFlagBitsNV"; } @@ -5965,20 +6218,55 @@ static inline std::string string_VkGeometryInstanceFlagsNV(VkGeometryInstanceFla 
return ret; } +static inline const char* string_VkBuildAccelerationStructureFlagBitsKHR(VkBuildAccelerationStructureFlagBitsKHR input_value) +{ + switch ((VkBuildAccelerationStructureFlagBitsKHR)input_value) + { + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR"; + default: + return "Unhandled VkBuildAccelerationStructureFlagBitsKHR"; + } +} + +static inline std::string string_VkBuildAccelerationStructureFlagsKHR(VkBuildAccelerationStructureFlagsKHR input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkBuildAccelerationStructureFlagBitsKHR(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkBuildAccelerationStructureFlagBitsKHR(static_cast(0))); + return ret; +} + static inline const char* string_VkBuildAccelerationStructureFlagBitsNV(VkBuildAccelerationStructureFlagBitsNV input_value) { switch ((VkBuildAccelerationStructureFlagBitsNV)input_value) { - case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV: - return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV"; - case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV: - return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV"; - case VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV: - return "VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV"; - case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV: - return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV"; - case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV: - return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV"; + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR"; default: return "Unhandled VkBuildAccelerationStructureFlagBitsNV"; } @@ -6000,29 +6288,65 @@ static inline std::string string_VkBuildAccelerationStructureFlagsNV(VkBuildAcce return ret; } +static inline const char* string_VkCopyAccelerationStructureModeKHR(VkCopyAccelerationStructureModeKHR input_value) +{ + switch ((VkCopyAccelerationStructureModeKHR)input_value) + { + case VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR"; + case VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR"; + case 
VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR"; + case VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR"; + default: + return "Unhandled VkCopyAccelerationStructureModeKHR"; + } +} + static inline const char* string_VkCopyAccelerationStructureModeNV(VkCopyAccelerationStructureModeNV input_value) { switch ((VkCopyAccelerationStructureModeNV)input_value) { - case VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV: - return "VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV"; - case VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV: - return "VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV"; + case VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR"; + case VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR"; + case VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR"; + case VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR"; default: return "Unhandled VkCopyAccelerationStructureModeNV"; } } +static inline const char* string_VkAccelerationStructureMemoryRequirementsTypeKHR(VkAccelerationStructureMemoryRequirementsTypeKHR input_value) +{ + switch ((VkAccelerationStructureMemoryRequirementsTypeKHR)input_value) + { + case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR: + return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR"; + case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR: + return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR"; + case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR: + return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR"; + default: + return "Unhandled VkAccelerationStructureMemoryRequirementsTypeKHR"; + } +} + static inline const char* string_VkAccelerationStructureMemoryRequirementsTypeNV(VkAccelerationStructureMemoryRequirementsTypeNV input_value) { switch ((VkAccelerationStructureMemoryRequirementsTypeNV)input_value) { - case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV: - return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV"; - case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV: - return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV"; - case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV: - return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV"; + case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR: + return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR"; + case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR: + return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR"; + case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR: + return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR"; default: return "Unhandled VkAccelerationStructureMemoryRequirementsTypeNV"; } @@ -6220,10 +6544,14 @@ static inline const char* string_VkValidationFeatureEnableEXT(VkValidationFeatur { case VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT: return "VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT"; + case VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT: + return 
"VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT"; case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT: return "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT"; case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT: return "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT"; + case VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT: + return "VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT"; default: return "Unhandled VkValidationFeatureEnableEXT"; } @@ -6351,6 +6679,139 @@ static inline const char* string_VkLineRasterizationModeEXT(VkLineRasterizationM } } +static inline const char* string_VkIndirectStateFlagBitsNV(VkIndirectStateFlagBitsNV input_value) +{ + switch ((VkIndirectStateFlagBitsNV)input_value) + { + case VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV: + return "VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV"; + default: + return "Unhandled VkIndirectStateFlagBitsNV"; + } +} + +static inline std::string string_VkIndirectStateFlagsNV(VkIndirectStateFlagsNV input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkIndirectStateFlagBitsNV(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkIndirectStateFlagBitsNV(static_cast(0))); + return ret; +} + +static inline const char* string_VkIndirectCommandsTokenTypeNV(VkIndirectCommandsTokenTypeNV input_value) +{ + switch ((VkIndirectCommandsTokenTypeNV)input_value) + { + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV"; + default: + return "Unhandled VkIndirectCommandsTokenTypeNV"; + } +} + +static inline const char* string_VkIndirectCommandsLayoutUsageFlagBitsNV(VkIndirectCommandsLayoutUsageFlagBitsNV input_value) +{ + switch ((VkIndirectCommandsLayoutUsageFlagBitsNV)input_value) + { + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV: + return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV"; + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV: + return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV"; + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV: + return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV"; + default: + return "Unhandled VkIndirectCommandsLayoutUsageFlagBitsNV"; + } +} + +static inline std::string string_VkIndirectCommandsLayoutUsageFlagsNV(VkIndirectCommandsLayoutUsageFlagsNV input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsNV(static_cast(1 << index))); 
+ } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsNV(static_cast(0))); + return ret; +} + +static inline const char* string_VkDeviceDiagnosticsConfigFlagBitsNV(VkDeviceDiagnosticsConfigFlagBitsNV input_value) +{ + switch ((VkDeviceDiagnosticsConfigFlagBitsNV)input_value) + { + case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV: + return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV"; + case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV: + return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV"; + case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV: + return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV"; + default: + return "Unhandled VkDeviceDiagnosticsConfigFlagBitsNV"; + } +} + +static inline std::string string_VkDeviceDiagnosticsConfigFlagsNV(VkDeviceDiagnosticsConfigFlagsNV input_value) +{ + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDeviceDiagnosticsConfigFlagBitsNV(static_cast(1 << index))); + } + ++index; + input_value >>= 1; + } + if( ret.empty()) ret.append(string_VkDeviceDiagnosticsConfigFlagBitsNV(static_cast(0))); + return ret; +} + + +#ifdef VK_ENABLE_BETA_EXTENSIONS + +static inline const char* string_VkAccelerationStructureBuildTypeKHR(VkAccelerationStructureBuildTypeKHR input_value) +{ + switch ((VkAccelerationStructureBuildTypeKHR)input_value) + { + case VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR: + return "VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR"; + case VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR: + return "VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR"; + case VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR: + return "VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR"; + default: + return "Unhandled VkAccelerationStructureBuildTypeKHR"; + } +} +#endif // VK_ENABLE_BETA_EXTENSIONS + static inline const char * GetPhysDevFeatureString(uint32_t index) { const char * IndexToPhysDevFeatureString[] = { "robustBufferAccess",
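// --------------------------------------------------------------------------
// Usage sketch (editorial note, not part of the generated header or patch
// above): the string_Vk*() helpers updated here are convenient when logging
// Vulkan state. Plain enum overloads return a const char*, while the *Flags
// variants expand a bitmask into a '|'-joined std::string using the bit-scan
// loop seen in the hunks above. The include path and function names used
// below are assumptions for illustration; adjust them to wherever this
// helper header lives in the tree.
#include <cstdio>
#include <vulkan/vulkan.h>
#include "vk_enum_string_helper.h"  // assumed location of the helpers above

static void log_swapchain_config(VkPresentModeKHR mode,
                                 VkCompositeAlphaFlagsKHR alpha_caps) {
    // Enum -> const char*:
    std::printf("present mode    : %s\n", string_VkPresentModeKHR(mode));
    // Flags -> std::string, e.g. "VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR|..."
    std::printf("composite alpha : %s\n",
                string_VkCompositeAlphaFlagsKHR(alpha_caps).c_str());
}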